Diffstat (limited to 'ansible_collections/dellemc/openmanage/tests')
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/.gitignore | 4
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/README.md | 54
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/__init__.py | 0
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/requirements.txt | 9
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.10.txt | 3
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.11.txt | 3
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.12.txt | 3
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.9.txt | 7
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/__init__.py | 0
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/__init__.py | 0
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/__init__.py | 0
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py | 284
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/__init__.py | 0
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/common.py | 81
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/conftest.py | 89
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py | 237
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py | 254
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_firmware_inventory.py | 108
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_system_inventory.py | 75
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py | 185
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py | 437
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py | 126
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_attributes.py | 307
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_bios.py | 587
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py | 256
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py | 298
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware.py | 625
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware_info.py | 77
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py | 78
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py | 91
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py | 108
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py | 82
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network.py | 286
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_os_deployment.py | 166
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py | 316
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py | 95
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_server_config_profile.py | 356
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_syslog.py | 197
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_system_info.py | 78
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_timezone_ntp.py | 228
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py | 350
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_virtual_media.py | 251
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_active_directory.py | 250
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_smtp.py | 457
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_syslog.py | 248
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py | 122
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py | 2240
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_address.py | 425
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_proxy.py | 297
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_settings.py | 381
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_time.py | 584
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_webserver.py | 143
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_security_settings.py | 400
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_chassis_slots.py | 297
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_baseline.py | 1195
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_info.py | 87
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_group.py | 602
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_info.py | 281
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py | 135
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py | 130
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py | 408
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_network_services.py | 185
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py | 122
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py | 173
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py | 467
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_diagnostics.py | 300
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_discovery.py | 460
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_domain_user_groups.py | 198
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware.py | 554
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline.py | 554
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py | 537
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_info.py | 136
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_catalog.py | 864
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_groups.py | 274
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_identity_pool.py | 1346
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_job_info.py | 106
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_port_breakout.py | 243
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan.py | 210
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan_info.py | 195
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_powerstate.py | 436
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_profile.py | 547
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profile_info.py | 95
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profiles.py | 699
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric.py | 1892
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_uplink.py | 386
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template.py | 602
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_identity_pool.py | 160
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_info.py | 98
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_network_vlan.py | 349
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user.py | 191
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user_info.py | 99
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_event_subscription.py | 452
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_firmware.py | 272
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_powerstate.py | 475
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py | 610
95 files changed, 29760 insertions, 0 deletions
diff --git a/ansible_collections/dellemc/openmanage/tests/.gitignore b/ansible_collections/dellemc/openmanage/tests/.gitignore
new file mode 100644
index 00000000..8c8e7569
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/.gitignore
@@ -0,0 +1,4 @@
+output/
+# Unit test / coverage reports
+htmlcov/
+.tox/
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/tests/README.md b/ansible_collections/dellemc/openmanage/tests/README.md
new file mode 100644
index 00000000..f66cdd59
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/README.md
@@ -0,0 +1,54 @@
+### Overview
+Dell EMC OpenManage Ansible Modules unit test scripts are located under the
+ [unit](./tests/unit) directory.
+
+### Implementing the unit tests
+Any contribution must have an associated unit test. This section describes the
+ tests that must accompany it.
+* Unit tests are required for every new resource, bug fix, or enhancement, and they must cover the change being submitted.
+* Test module names must be the name of the tested module prefixed with
+ "test_". For example: test_ome_user
+
+### Prerequisites
+* Dell EMC OpenManage collection - to install it, run `ansible-galaxy collection
+ install dellemc.openmanage`
+* To run the unit tests for the iDRAC modules, install the OpenManage Python Software Development Kit (OMSDK) using
+`pip install omsdk --upgrade`, or install it from [Dell EMC OpenManage Python SDK](https://github.com/dell/omsdk)
+
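+The other Python packages used by the unit tests are listed in `requirements.txt` in this directory;
+ one way to install them is
+```
+# run from this tests directory
+pip install -r requirements.txt
+```
+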
+### Executing unit tests
+You can run the unit tests manually with a tool of your choice, such as `pytest` or `ansible-test`.
+
+#### Executing with `ansible-test`
+* Clone the [Ansible repository](https://github.com/ansible/ansible) from GitHub to a local directory, referred to here as $ANSIBLE_DIR.
+* Copy the `compat` directory from the cloned repository path
+ `$ANSIBLE_DIR/test/units/` to the installed Dell EMC OpenManage collection at `$ANSIBLE_COLLECTIONS_PATHS/ansible_collections/dellemc/openmanage/tests/unit`.
+* Copy the `utils.py` file from `$ANSIBLE_DIR/test/units/modules` to the installed collection at `$ANSIBLE_COLLECTIONS_PATHS/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules`.
+* Edit the copied `utils.py` so that it imports from the `compat` package copied above, as shown below:
+```python
+ from units.compat import unittest
+
+ # Replace the import above in utils.py with the following line
+
+ from ansible_collections.dellemc.openmanage.tests.unit.compat import unittest
+```
+* To install the `ansible-test` requirements, run:
+ ```
+ ansible-test units --requirements
+ ```
+* To run all the unit tests, use the following command:
+ ```
+ ansible-test units -vvv
+ ```
+* To run the tests for a specific module, use the following command:
+ ```
+ ansible-test units idrac_server_config_profile
+ ```
+See [here](https://docs.ansible.com/ansible/latest/dev_guide/testing_units.html#testing-units) for more details on unit testing.
+
+#### Executing with `pytest`
+
+See [here](https://docs.pytest.org/en/stable/) for details on installing and running `pytest`.
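+
+For example, the following command (run from the collection root, assuming the collection is installed under
+ `$ANSIBLE_COLLECTIONS_PATHS` so that `ansible_collections.dellemc.openmanage` is importable and the packages
+ from `requirements.txt` are installed) runs a single test module and reports coverage:
+```
+# run from the collection root
+pytest tests/unit/plugins/modules/test_ome_user.py --cov=ansible_collections.dellemc.openmanage.plugins --cov-report=term
+```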
+
+### Acceptance criteria
+The code coverage of a new module must be more than 90%.
+Measure code coverage with `pytest` as explained [here](https://pytest-cov.readthedocs.io/en/latest/reporting.html).
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/tests/__init__.py b/ansible_collections/dellemc/openmanage/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/__init__.py
diff --git a/ansible_collections/dellemc/openmanage/tests/requirements.txt b/ansible_collections/dellemc/openmanage/tests/requirements.txt
new file mode 100644
index 00000000..3ea8227f
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/requirements.txt
@@ -0,0 +1,9 @@
+omsdk
+pytest
+pytest-xdist==2.5.0
+mock
+pytest-mock
+pytest-cov
+# pytest-ansible==2.0.1
+coverage==4.5.4
+netaddr>=0.7.19
diff --git a/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.10.txt b/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.10.txt
new file mode 100644
index 00000000..f6fec0eb
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.10.txt
@@ -0,0 +1,3 @@
+tests/unit/plugins/modules/test_ome_server_interface_profiles.py compile-2.6!skip
+plugins/modules/idrac_attributes.py compile-2.6!skip
+plugins/modules/idrac_attributes.py import-2.6!skip
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.11.txt b/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.11.txt
new file mode 100644
index 00000000..f6fec0eb
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.11.txt
@@ -0,0 +1,3 @@
+tests/unit/plugins/modules/test_ome_server_interface_profiles.py compile-2.6!skip
+plugins/modules/idrac_attributes.py compile-2.6!skip
+plugins/modules/idrac_attributes.py import-2.6!skip
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.12.txt b/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.12.txt
new file mode 100644
index 00000000..f6fec0eb
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.12.txt
@@ -0,0 +1,3 @@
+tests/unit/plugins/modules/test_ome_server_interface_profiles.py compile-2.6!skip
+plugins/modules/idrac_attributes.py compile-2.6!skip
+plugins/modules/idrac_attributes.py import-2.6!skip
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.9.txt b/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.9.txt
new file mode 100644
index 00000000..9d8f3ba1
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.9.txt
@@ -0,0 +1,7 @@
+plugins/modules/dellemc_get_firmware_inventory.py validate-modules:deprecation-mismatch
+plugins/modules/dellemc_get_firmware_inventory.py validate-modules:invalid-documentation
+plugins/modules/dellemc_get_system_inventory.py validate-modules:deprecation-mismatch
+plugins/modules/dellemc_get_system_inventory.py validate-modules:invalid-documentation
+tests/unit/plugins/modules/test_ome_server_interface_profiles.py compile-2.6!skip
+plugins/modules/idrac_attributes.py compile-2.6!skip
+plugins/modules/idrac_attributes.py import-2.6!skip
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/__init__.py b/ansible_collections/dellemc/openmanage/tests/unit/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/__init__.py
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/__init__.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/__init__.py
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/__init__.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/__init__.py
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py
new file mode 100644
index 00000000..fc0f0be5
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py
@@ -0,0 +1,284 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2019-2022 Dell Inc.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# All rights reserved. Dell, EMC, and other trademarks are trademarks of Dell Inc. or its subsidiaries.
+# Other trademarks may be trademarks of their respective owners.
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME
+from mock import MagicMock
+import json
+
+MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.'
+
+
+class TestRestOME(object):
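+    """Unit tests for the RestOME module_utils class: request invocation,
+    error handling, URL building, and the report/device helper methods."""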
+
+ @pytest.fixture
+ def ome_response_mock(self, mocker):
+ set_method_result = {'json_data': {}}
+ response_class_mock = mocker.patch(
+ MODULE_UTIL_PATH + 'ome.OpenURLResponse',
+ return_value=set_method_result)
+ response_class_mock.success = True
+ response_class_mock.status_code = 200
+ return response_class_mock
+
+ @pytest.fixture
+ def mock_response(self):
+ mock_response = MagicMock()
+ mock_response.getcode.return_value = 200
+ mock_response.headers = mock_response.getheaders.return_value = {'X-Auth-Token': 'token_id'}
+ mock_response.read.return_value = json.dumps({"value": "data"})
+ return mock_response
+
+ def test_invoke_request_with_session(self, mock_response, mocker):
+ mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+ return_value=mock_response)
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ req_session = True
+ with RestOME(module_params, req_session) as obj:
+ response = obj.invoke_request("/testpath", "GET")
+ assert response.status_code == 200
+ assert response.json_data == {"value": "data"}
+ assert response.success is True
+
+ def test_invoke_request_without_session(self, mock_response, mocker):
+ mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+ return_value=mock_response)
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ req_session = False
+ with RestOME(module_params, req_session) as obj:
+ response = obj.invoke_request("/testpath", "GET")
+ assert response.status_code == 200
+ assert response.json_data == {"value": "data"}
+ assert response.success is True
+
+ def test_invoke_request_without_session_with_header(self, mock_response, mocker):
+ mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+ return_value=mock_response)
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ req_session = False
+ with RestOME(module_params, req_session) as obj:
+ response = obj.invoke_request("/testpath", "POST", headers={"application": "octstream"})
+ assert response.status_code == 200
+ assert response.json_data == {"value": "data"}
+ assert response.success is True
+
+ def test_invoke_request_with_session_connection_error(self, mocker, mock_response):
+ mock_response.success = False
+ mock_response.status_code = 500
+ mock_response.json_data = {}
+ mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ return_value=mock_response)
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ req_session = True
+ with pytest.raises(ConnectionError):
+ with RestOME(module_params, req_session) as obj:
+ obj.invoke_request("/testpath", "GET")
+
+ @pytest.mark.parametrize("exc", [URLError, SSLValidationError, ConnectionError])
+ def test_invoke_request_error_case_handling(self, exc, mock_response, mocker):
+ open_url_mock = mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+ return_value=mock_response)
+ open_url_mock.side_effect = exc("test")
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ req_session = False
+ with pytest.raises(exc) as e:
+ with RestOME(module_params, req_session) as obj:
+ obj.invoke_request("/testpath", "GET")
+
+ def test_invoke_request_http_error_handling(self, mock_response, mocker):
+ open_url_mock = mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+ return_value=mock_response)
+ open_url_mock.side_effect = HTTPError('http://testhost.com/', 400,
+ 'Bad Request Error', {}, None)
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ req_session = False
+ with pytest.raises(HTTPError) as e:
+ with RestOME(module_params, req_session) as obj:
+ obj.invoke_request("/testpath", "GET")
+
+ def test_get_all_report_details(self, mock_response, mocker):
+ mock_response.success = True
+ mock_response.status_code = 200
+ mock_response.json_data = {"@odata.count": 50, "value": list(range(51))}
+ mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ return_value=mock_response)
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ with RestOME(module_params, True) as obj:
+ reports = obj.get_all_report_details("DeviceService/Devices")
+ assert reports == {"resp_obj": mock_response, "report_list": list(range(51))}
+
+ def test_get_report_list_error_case(self, mock_response, mocker):
+ mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+ return_value=mock_response)
+ invoke_obj = mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ side_effect=HTTPError('http://testhost.com/', 400, 'Bad Request Error', {}, None))
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ with pytest.raises(HTTPError) as e:
+ with RestOME(module_params, False) as obj:
+ obj.get_all_report_details("DeviceService/Devices")
+
+ @pytest.mark.parametrize("query_param", [
+ {"inp": {"$filter": "UserName eq 'admin'"}, "out": "%24filter=UserName%20eq%20%27admin%27"},
+ {"inp": {"$top": 1, "$skip": 2, "$filter": "JobType/Id eq 8"}, "out":
+ "%24top=1&%24skip=2&%24filter=JobType%2FId%20eq%208"},
+ {"inp": {"$top": 1, "$skip": 3}, "out": "%24top=1&%24skip=3"}
+ ])
+ def test_build_url(self, query_param, mocker):
+ """builds complete url"""
+ base_uri = 'https://192.168.0.1:443/api'
+ path = "AccountService/Accounts"
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME._get_base_url',
+ return_value=base_uri)
+ inp = query_param["inp"]
+ out = query_param["out"]
+ url = RestOME(module_params=module_params)._build_url(path, query_param=inp)
+ assert url == base_uri + "/" + path + "?" + out
+ assert "+" not in url
+
+ def test_get_job_type_id(self, mock_response, mocker):
+ mock_response.success = True
+ mock_response.status_code = 200
+ mock_response.json_data = {"@odata.count": 50, "value": [{"Name": "PowerChange", "Id": 11}]}
+ mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ return_value=mock_response)
+ jobtype_name = "PowerChange"
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ with RestOME(module_params, True) as obj:
+ job_id = obj.get_job_type_id(jobtype_name)
+ assert job_id == 11
+
+ def test_get_job_type_id_null_case(self, mock_response, mocker):
+ mock_response.success = True
+ mock_response.status_code = 200
+ mock_response.json_data = {"@odata.count": 50, "value": [{"Name": "PowerChange", "Id": 11}]}
+ mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ return_value=mock_response)
+ jobtype_name = "FirmwareUpdate"
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ with RestOME(module_params, True) as obj:
+ job_id = obj.get_job_type_id(jobtype_name)
+ assert job_id is None
+
+ def test_get_device_id_from_service_tag_ome_case01(self, mocker, mock_response):
+ mock_response.success = True
+ mock_response.status_code = 200
+ mock_response.json_data = {"@odata.count": 1, "value": [{"Name": "xyz", "Id": 11}]}
+ mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ return_value=mock_response)
+ ome_default_args = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ with RestOME(ome_default_args, True) as obj:
+ details = obj.get_device_id_from_service_tag("xyz")
+ assert details["Id"] == 11
+ assert details["value"] == {"Name": "xyz", "Id": 11}
+
+ def test_get_device_id_from_service_tag_ome_case02(self, mocker, mock_response):
+ mock_response.success = True
+ mock_response.status_code = 200
+ mock_response.json_data = {"@odata.count": 0, "value": []}
+ mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ return_value=mock_response)
+ ome_default_args = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ with RestOME(ome_default_args, True) as obj:
+ details = obj.get_device_id_from_service_tag("xyz")
+ assert details["Id"] is None
+ assert details["value"] == {}
+
+ def test_get_all_items_with_pagination(self, mock_response, mocker):
+ mock_response.success = True
+ mock_response.status_code = 200
+ mock_response.json_data = {"@odata.count": 50, "value": list(range(51))}
+ mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ return_value=mock_response)
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ with RestOME(module_params, True) as obj:
+ reports = obj.get_all_items_with_pagination("DeviceService/Devices")
+ assert reports == {"total_count": 50, "value": list(range(51))}
+
+ def test_get_all_items_with_pagination_error_case(self, mock_response, mocker):
+ mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+ return_value=mock_response)
+ invoke_obj = mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ side_effect=HTTPError('http://testhost.com/', 400, 'Bad Request Error', {}, None))
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ with pytest.raises(HTTPError) as e:
+ with RestOME(module_params, False) as obj:
+ obj.get_all_items_with_pagination("DeviceService/Devices")
+
+ def test_get_device_type(self, mock_response, mocker):
+ mock_response.success = True
+ mock_response.status_code = 200
+ mock_response.json_data = {
+ "@odata.context": "/api/$metadata#Collection(DeviceService.DeviceType)",
+ "@odata.count": 5,
+ "value": [
+ {
+ "@odata.type": "#DeviceService.DeviceType",
+ "DeviceType": 1000,
+ "Name": "SERVER",
+ "Description": "Server Device"
+ },
+ {
+ "@odata.type": "#DeviceService.DeviceType",
+ "DeviceType": 2000,
+ "Name": "CHASSIS",
+ "Description": "Chassis Device"
+ },
+ {
+ "@odata.type": "#DeviceService.DeviceType",
+ "DeviceType": 3000,
+ "Name": "STORAGE",
+ "Description": "Storage Device"
+ },
+ {
+ "@odata.type": "#DeviceService.DeviceType",
+ "DeviceType": 4000,
+ "Name": "NETWORK_IOM",
+ "Description": "NETWORK IO Module Device"
+ },
+ {
+ "@odata.type": "#DeviceService.DeviceType",
+ "DeviceType": 8000,
+ "Name": "STORAGE_IOM",
+ "Description": "Storage IOM Device"
+ }
+ ]
+ }
+ mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ return_value=mock_response)
+ module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ with RestOME(module_params, False) as obj:
+ type_map = obj.get_device_type()
+ assert type_map == {1000: "SERVER", 2000: "CHASSIS", 3000: "STORAGE",
+ 4000: "NETWORK_IOM", 8000: "STORAGE_IOM"}
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/__init__.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/__init__.py
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/common.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/common.py
new file mode 100644
index 00000000..0cc124f9
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/common.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2019-2022 Dell Inc.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# All rights reserved. Dell, EMC, and other trademarks are trademarks of Dell Inc. or its subsidiaries.
+# Other trademarks may be trademarks of their respective owners.
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.utils import set_module_args, AnsibleFailJson, \
+ AnsibleExitJson
+from mock import MagicMock
+import ast
+
+
+class Constants:
+ device_id1 = 1234
+ device_id2 = 4321
+ service_tag1 = "MXL1234"
+ service_tag2 = "MXL5467"
+ hostname1 = "192.168.0.1"
+ hostname2 = "192.168.0.2"
+
+
+class AnsibleFailJSonException(Exception):
+ def __init__(self, msg, **kwargs):
+ super(AnsibleFailJSonException, self).__init__(msg)
+ self.fail_msg = msg
+ self.fail_kwargs = kwargs
+
+
+class FakeAnsibleModule:
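+    """Helpers for module unit tests: run a module's main() and capture the
+    AnsibleExitJson/AnsibleFailJson result, or build a mocked module object."""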
+
+ def _run_module(self, module_args, check_mode=False):
+ module_args.update({'_ansible_check_mode': check_mode})
+ set_module_args(module_args)
+ with pytest.raises(AnsibleExitJson) as ex:
+ self.module.main()
+ return ex.value.args[0]
+
+ def _run_module_with_fail_json(self, module_args):
+ set_module_args(module_args)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.module.main()
+ result = exc.value.args[0]
+ return result
+
+ def execute_module(self, module_args, check_mode=False):
+ """[workaround]: generic exception handling in module will
+ be caught here and extracted the result for exit_json case"""
+ module_args.update({'_ansible_check_mode': check_mode})
+ set_module_args(module_args)
+ result = {}
+ try:
+ with pytest.raises(AnsibleExitJson) as ex:
+ self.module.main()
+ except Exception as err:
+ result = ast.literal_eval(err.args[0]['msg'])
+ return result
+
+ def get_module_mock(self, params=None, check_mode=False):
+ if params is None:
+ params = {}
+
+ def fail_func(msg, **kwargs):
+ raise AnsibleFailJSonException(msg, **kwargs)
+
+ module = MagicMock()
+ module.fail_json.side_effect = fail_func
+ module.exit_json.side_effect = fail_func
+ module.params = params
+ module.check_mode = check_mode
+ return module
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/conftest.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/conftest.py
new file mode 100644
index 00000000..e6f9ae46
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/conftest.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2019-2022 Dell Inc.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# All rights reserved. Dell, EMC, and other trademarks are trademarks of Dell Inc. or its subsidiaries.
+# Other trademarks may be trademarks of their respective owners.
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible.module_utils import basic
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.utils import set_module_args, exit_json, \
+ fail_json, AnsibleFailJson, AnsibleExitJson
+from mock import MagicMock
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.'
+
+
+@pytest.fixture(autouse=True)
+def module_mock(mocker):
+ return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
+
+
+@pytest.fixture
+def ome_connection_mock(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_device_info.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+@pytest.fixture
+def ome_response_mock(mocker):
+ set_method_result = {'json_data': {}}
+ response_class_mock = mocker.patch(MODULE_UTIL_PATH + 'ome.OpenURLResponse', return_value=set_method_result)
+ response_class_mock.success = True
+ response_class_mock.status_code = 200
+ return response_class_mock
+
+
+@pytest.fixture
+def redfish_response_mock(mocker):
+ set_method_result = {'json_data': {}}
+ response_class_mock = mocker.patch(MODULE_UTIL_PATH + 'redfish.OpenURLResponse', return_value=set_method_result)
+ response_class_mock.success = True
+ response_class_mock.status_code = 200
+ return response_class_mock
+
+
+@pytest.fixture
+def ome_default_args():
+ default_args = {'hostname': '192.168.0.1', 'username': 'username', 'password': 'password', "ca_path": "/path/ca_bundle"}
+ return default_args
+
+
+@pytest.fixture
+def idrac_default_args():
+ default_args = {"idrac_ip": "idrac_ip", "idrac_user": "idrac_user", "idrac_password": "idrac_password",
+ "ca_path": "/path/to/ca_cert.pem"}
+ return default_args
+
+
+@pytest.fixture
+def redfish_default_args():
+ default_args = {'baseuri': '192.168.0.1', 'username': 'username', 'password': 'password',
+ "ca_path": "/path/to/ca_cert.pem"}
+ return default_args
+
+
+@pytest.fixture
+def fake_ansible_module_mock():
+ module = MagicMock()
+ module.params = {}
+ module.fail_json = AnsibleFailJson()
+ module.exit_json = AnsibleExitJson()
+ return module
+
+
+@pytest.fixture
+def default_ome_args():
+ return {"hostname": "hostname", "username": "username", "password": "password"}
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py
new file mode 100644
index 00000000..0386269e
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py
@@ -0,0 +1,237 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 6.0.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_configure_idrac_eventing
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock, PropertyMock
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+
+class TestConfigureEventing(FakeAnsibleModule):
+ module = dellemc_configure_idrac_eventing
+
+ @pytest.fixture
+ def idrac_configure_eventing_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.file_share_manager = idrac_obj
+ omsdk_mock.config_mgr = idrac_obj
+ type(idrac_obj).create_share_obj = Mock(return_value="Status")
+ type(idrac_obj).set_liason_share = Mock(return_value="Status")
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_file_manager_config_eventing_mock(self, mocker):
+ try:
+ file_manager_obj = mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.dellemc_configure_idrac_eventing.file_share_manager')
+ except AttributeError:
+ file_manager_obj = MagicMock()
+ obj = MagicMock()
+ file_manager_obj.create_share_obj.return_value = obj
+ return file_manager_obj
+
+ @pytest.fixture
+ def is_changes_applicable_eventing_mock(self, mocker):
+ try:
+ changes_applicable_obj = mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.dellemc_configure_idrac_eventing.config_mgr')
+ except AttributeError:
+ changes_applicable_obj = MagicMock()
+ obj = MagicMock()
+ changes_applicable_obj.is_change_applicable.return_value = obj
+ return changes_applicable_obj
+
+ @pytest.fixture
+ def idrac_connection_configure_eventing_mock(self, mocker, idrac_configure_eventing_mock):
+ idrac_conn_class_mock = mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_configure_idrac_eventing.iDRACConnection',
+ return_value=idrac_configure_eventing_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_configure_eventing_mock
+ return idrac_configure_eventing_mock
+
+ def test_main_configure_eventing_success_case01(self, idrac_connection_configure_eventing_mock, idrac_default_args,
+ mocker, idrac_file_manager_config_eventing_mock):
+ idrac_default_args.update({"share_name": None, 'share_password': None, "destination_number": 1,
+ "destination": "1.1.1.1", 'share_mnt': None, 'share_user': None})
+ message = {'msg': 'Successfully configured the idrac eventing settings.',
+ 'eventing_status': {"Id": "JID_12345123456", "JobState": "Completed"},
+ 'changed': True}
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_configure_idrac_eventing.run_idrac_eventing_config', return_value=message)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Successfully configured the iDRAC eventing settings."
+ status_msg = {"Status": "Success", "Message": "No changes found to commit!"}
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_configure_idrac_eventing.run_idrac_eventing_config', return_value=status_msg)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "No changes found to commit!"
+
+ def test_run_idrac_eventing_config_success_case01(self, idrac_connection_configure_eventing_mock,
+ idrac_file_manager_config_eventing_mock, idrac_default_args,
+ is_changes_applicable_eventing_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "destination_number": 1, "destination": "1.1.1.1",
+ "snmp_v3_username": "snmpuser", "snmp_trap_state": "Enabled", "alert_number": 4,
+ "email_alert_state": "Enabled", "address": "abc@xyz", "custom_message": "test",
+ "enable_alerts": "Enabled", "authentication": "Enabled",
+ "smtp_ip_address": "192.168.0.1", "smtp_port": 443, "username": "uname",
+ "password": "pwd"})
+ message = {"changes_applicable": True, "message": "Changes found to commit!"}
+ idrac_connection_configure_eventing_mock.config_mgr.is_change_applicable.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ with pytest.raises(Exception) as ex:
+ self.module.run_idrac_eventing_config(idrac_connection_configure_eventing_mock, f_module)
+ assert "Changes found to commit!" == ex.value.args[0]
+
+ def test_run_idrac_eventing_config_success_case02(self, idrac_connection_configure_eventing_mock,
+ idrac_file_manager_config_eventing_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "destination_number": 1, "destination": "1.1.1.1",
+ "snmp_v3_username": "snmpuser", "snmp_trap_state": "Enabled", "alert_number": 4,
+ "email_alert_state": "Enabled", "address": "abc@xyz", "custom_message": "test",
+ "enable_alerts": "Enabled", "authentication": "Enabled",
+ "smtp_ip_address": "192.168.0.1", "smtp_port": 443, "username": "uname",
+ "password": "pwd"})
+ message = {"changes_applicable": True, "message": "changes found to commit!", "changed": True,
+ "Status": "Success"}
+ idrac_connection_configure_eventing_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ result = self.module.run_idrac_eventing_config(idrac_connection_configure_eventing_mock, f_module)
+ assert result['message'] == 'changes found to commit!'
+
+ def test_run_idrac_eventing_config_success_case03(self, idrac_connection_configure_eventing_mock,
+ idrac_file_manager_config_eventing_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "destination_number": 1,
+ "destination": "1.1.1.1", "snmp_v3_username": "snmpuser",
+ "snmp_trap_state": "Enabled", "alert_number": 4, "email_alert_state": "Enabled",
+ "address": "abc@xyz", "custom_message": "test", "enable_alerts": "Enabled",
+ "authentication": "Enabled", "smtp_ip_address": "192.168.0.1", "smtp_port": 443,
+ "username": "uname", "password": "pwd"})
+ message = {"changes_applicable": False, "Message": "No changes found to commit!", "changed": False,
+ "Status": "Success"}
+ idrac_connection_configure_eventing_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ result = self.module.run_idrac_eventing_config(idrac_connection_configure_eventing_mock, f_module)
+ assert result["Message"] == 'No changes found to commit!'
+
+ def test_run_idrac_eventing_config_success_case04(self, idrac_connection_configure_eventing_mock,
+ idrac_default_args, idrac_file_manager_config_eventing_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "destination_number": 1, "destination": "1.1.1.1",
+ "snmp_v3_username": "snmpuser", "snmp_trap_state": "Enabled", "alert_number": 4,
+ "email_alert_state": "Enabled", "address": "abc@xyz", "custom_message": "test",
+ "enable_alerts": "Enabled", "authentication": "Enabled",
+ "smtp_ip_address": "192.168.0.1", "smtp_port": 443, "username": "uname",
+ "password": "pwd"})
+ message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
+ "Status": "Success"}
+ idrac_connection_configure_eventing_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ result = self.module.run_idrac_eventing_config(idrac_connection_configure_eventing_mock, f_module)
+ assert result['Message'] == 'No changes were applied'
+
+ def test_run_idrac_eventing_config_success_case05(self, idrac_connection_configure_eventing_mock,
+ idrac_file_manager_config_eventing_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "destination_number": None, "destination": None,
+ "snmp_v3_username": None, "snmp_trap_state": None, "alert_number": None,
+ "email_alert_state": None, "address": None, "custom_message": None,
+ "enable_alerts": None, "authentication": None,
+ "smtp_ip_address": None, "smtp_port": None, "username": None,
+ "password": None})
+ message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
+ "Status": "Success"}
+ obj = MagicMock()
+ idrac_connection_configure_eventing_mock.config_mgr = obj
+ type(obj).configure_snmp_trap_destination = PropertyMock(return_value=message)
+ type(obj).configure_email_alerts = PropertyMock(return_value=message)
+ type(obj).configure_idrac_alerts = PropertyMock(return_value=message)
+ type(obj).configure_smtp_server_settings = PropertyMock(return_value=message)
+ idrac_connection_configure_eventing_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ result = self.module.run_idrac_eventing_config(idrac_connection_configure_eventing_mock, f_module)
+ assert result['Message'] == 'No changes were applied'
+
+ def test_run_idrac_eventing_config_failed_case01(self, idrac_connection_configure_eventing_mock,
+ idrac_file_manager_config_eventing_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "destination_number": 1, "destination": "1.1.1.1",
+ "snmp_v3_username": "snmpuser", "snmp_trap_state": "Enabled", "alert_number": 4,
+ "email_alert_state": "Enabled", "address": "abc@xyz", "custom_message": "test",
+ "enable_alerts": "Enabled", "authentication": "Enabled",
+ "smtp_ip_address": "192.168.0.1", "smtp_port": 443, "username": "uname",
+ "password": "pwd"})
+ message = {'Status': 'Failed', "Data": {'Message': 'status failed in checking Data'}}
+ idrac_connection_configure_eventing_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
+ idrac_connection_configure_eventing_mock.config_mgr.set_liason_share.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.run_idrac_eventing_config(idrac_connection_configure_eventing_mock, f_module)
+ assert ex.value.args[0] == 'status failed in checking Data'
+
+ def test_run_idrac_eventing_config_failed_case02(self, idrac_connection_configure_eventing_mock,
+ idrac_default_args, idrac_file_manager_config_eventing_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "destination_number": 1, "destination": "1.1.1.1",
+ "snmp_v3_username": "snmpuser", "snmp_trap_state": "Enabled", "alert_number": 4,
+ "email_alert_state": "Enabled", "address": "abc@xyz", "custom_message": "test",
+ "enable_alerts": "Enabled", "authentication": "Enabled",
+ "smtp_ip_address": "192.168.0.1", "smtp_port": 443, "username": "uname",
+ "password": "pwd"})
+ message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
+ "Status": "failed"}
+ idrac_connection_configure_eventing_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ result = self.module.run_idrac_eventing_config(idrac_connection_configure_eventing_mock, f_module)
+ assert result['Message'] == 'No changes were applied'
+
+ def test_run_idrac_eventing_config_failed_case03(self, idrac_connection_configure_eventing_mock,
+ idrac_default_args, idrac_file_manager_config_eventing_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "destination_number": 1,
+ "destination": "1.1.1.1", "snmp_v3_username": "snmpuser",
+ "snmp_trap_state": "Enabled", "alert_number": 4, "email_alert_state": "Enabled",
+ "address": "abc@xyz", "custom_message": "test", "enable_alerts": "Enabled",
+ "authentication": "Enabled", "smtp_ip_address": "192.168.0.1",
+ "smtp_port": 443, "username": "uname", "password": "pwd"})
+ message = {'Status': 'Failed', "Data": {'Message': "Failed to found changes"}}
+ idrac_connection_configure_eventing_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
+ idrac_connection_configure_eventing_mock.config_mgr.set_liason_share.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.run_idrac_eventing_config(idrac_connection_configure_eventing_mock, f_module)
+ assert ex.value.args[0] == 'Failed to found changes'
+
+ @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
+ def test_main_configure_eventing_exception_handling_case(self, exc_type, mocker, idrac_default_args,
+ idrac_connection_configure_eventing_mock,
+ idrac_file_manager_config_eventing_mock):
+ idrac_default_args.update({"share_name": None, 'share_password': None,
+ 'share_mnt': None, 'share_user': None})
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_configure_idrac_eventing.run_idrac_eventing_config', side_effect=exc_type('test'))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert 'msg' in result
+ assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py
new file mode 100644
index 00000000..2606a034
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py
@@ -0,0 +1,254 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 6.0.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_configure_idrac_services
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+
+class TestConfigServices(FakeAnsibleModule):
+ module = dellemc_configure_idrac_services
+
+ @pytest.fixture
+ def idrac_configure_services_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.file_share_manager = idrac_obj
+ omsdk_mock.config_mgr = idrac_obj
+ type(idrac_obj).create_share_obj = Mock(return_value="servicesstatus")
+ type(idrac_obj).set_liason_share = Mock(return_value="servicestatus")
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_file_manager_config_services_mock(self, mocker):
+ try:
+ file_manager_obj = mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.dellemc_configure_idrac_services.file_share_manager')
+ except AttributeError:
+ file_manager_obj = MagicMock()
+ obj = MagicMock()
+ file_manager_obj.create_share_obj.return_value = obj
+ return file_manager_obj
+
+ @pytest.fixture
+ def is_changes_applicable_mock_services(self, mocker):
+ try:
+ changes_applicable_mock = mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.dellemc_configure_idrac_services.'
+ 'config_mgr')
+ except AttributeError:
+ changes_applicable_mock = MagicMock()
+ obj = MagicMock()
+ changes_applicable_mock.is_change_applicable.return_value = obj
+ return changes_applicable_mock
+
+ @pytest.fixture
+ def idrac_connection_configure_services_mock(self, mocker, idrac_configure_services_mock):
+ idrac_conn_class_mock = mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_configure_idrac_services.iDRACConnection',
+ return_value=idrac_configure_services_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_configure_services_mock
+ return idrac_configure_services_mock
+
+ def test_main_idrac_services_config_success_Case(self, idrac_connection_configure_services_mock, idrac_default_args,
+ mocker, idrac_file_manager_config_services_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "enable_web_server": "Enabled", "http_port": 443,
+ "https_port": 343, "timeout": 10, "ssl_encryption": "T_128_Bit_or_higher",
+ "tls_protocol": "TLS_1_1_and_Higher", "snmp_enable": "Enabled",
+ "community_name": "communityname", "snmp_protocol": "All", "alert_port": 445,
+ "discovery_port": 1000, "trap_format": "SNMPv1",
+ "ipmi_lan": {"community_name": "public"}})
+ message = {'changed': False, 'msg': {'Status': "Success", "message": "No changes found to commit!"}}
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_configure_idrac_services.run_idrac_services_config', return_value=message)
+ with pytest.raises(Exception) as ex:
+ self._run_module(idrac_default_args)
+ assert ex.value.args[0]['msg'] == "Failed to configure the iDRAC services."
+ status_msg = {"Status": "Success", "Message": "No changes found to commit!"}
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_configure_idrac_services.run_idrac_services_config', return_value=status_msg)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "No changes found to commit!"
+ status_msg = {"Status": "Failed"}
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_configure_idrac_services.run_idrac_services_config', return_value=status_msg)
+ with pytest.raises(Exception) as ex:
+ self._run_module(idrac_default_args)
+ assert ex.value.args[0]['msg'] == "Failed to configure the iDRAC services."
+
+ def test_run_idrac_services_config_success_case01(self, idrac_connection_configure_services_mock,
+ idrac_default_args, idrac_file_manager_config_services_mock,
+ is_changes_applicable_mock_services):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "enable_web_server": "Enabled", "http_port": 443,
+ "https_port": 343, "timeout": 10, "ssl_encryption": "T_128_Bit_or_higher",
+ "tls_protocol": "TLS_1_1_and_Higher", "snmp_enable": "Enabled",
+ "community_name": "communityname", "snmp_protocol": "All", "alert_port": 445,
+ "discovery_port": 1000, "trap_format": "SNMPv1",
+ "ipmi_lan": {"community_name": "public"}})
+ message = {"changes_applicable": True, "message": "changes are applicable"}
+ idrac_connection_configure_services_mock.config_mgr.is_change_applicable.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ with pytest.raises(Exception) as ex:
+ self.module.run_idrac_services_config(idrac_connection_configure_services_mock, f_module)
+ assert ex.value.args[0] == "Changes found to commit!"
+
+ def test_run_idrac_services_config_success_case02(self, idrac_connection_configure_services_mock,
+ idrac_default_args, idrac_file_manager_config_services_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "enable_web_server": "Enabled", "http_port": 443,
+ "https_port": 343, "timeout": 10, "ssl_encryption": "T_128_Bit_or_higher",
+ "tls_protocol": "TLS_1_1_and_Higher", "snmp_enable": "Enabled",
+ "community_name": "communityname", "snmp_protocol": "All", "alert_port": 445,
+ "discovery_port": 1000, "trap_format": "SNMPv1",
+ "ipmi_lan": {"community_name": "public"}})
+ message = {"changes_applicable": True, "message": "changes found to commit!",
+ "Status": "Success"}
+ idrac_connection_configure_services_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_services_config(idrac_connection_configure_services_mock, f_module)
+ assert msg == {'changes_applicable': True, 'message': 'changes found to commit!', 'Status': 'Success'}
+
+ def test_run_idrac_services_config_success_case03(self, idrac_connection_configure_services_mock,
+ idrac_default_args, idrac_file_manager_config_services_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "enable_web_server": "Enabled", "http_port": 443,
+ "https_port": 343, "timeout": 10, "ssl_encryption": "T_128_Bit_or_higher",
+ "tls_protocol": "TLS_1_1_and_Higher", "snmp_enable": "Enabled",
+ "community_name": "communityname", "snmp_protocol": "All", "alert_port": 445,
+ "discovery_port": 1000, "trap_format": "SNMPv1",
+ "ipmi_lan": {"community_name": "public"}})
+ message = {"changes_applicable": False, "Message": "No changes found to commit!", "changed": False,
+ "Status": "Success"}
+ idrac_connection_configure_services_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_services_config(idrac_connection_configure_services_mock, f_module)
+ assert msg == {'changes_applicable': False, 'Message': 'No changes found to commit!',
+ 'changed': False, 'Status': 'Success'}
+
+ def test_run_idrac_services_config_success_case04(self, idrac_connection_configure_services_mock,
+ idrac_default_args, idrac_file_manager_config_services_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "enable_web_server": "Enabled", "http_port": 443,
+ "https_port": 343, "timeout": 10, "ssl_encryption": "T_128_Bit_or_higher",
+ "tls_protocol": "TLS_1_1_and_Higher", "snmp_enable": "Enabled",
+ "community_name": "communityname", "snmp_protocol": "All", "alert_port": 445,
+ "discovery_port": 1000, "trap_format": "SNMPv1",
+ "ipmi_lan": {"community_name": "public"}})
+ message = {"changes_applicable": False, "Message": "No changes found to commit!", "changed": False,
+ "Status": "Success"}
+ idrac_connection_configure_services_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_services_config(idrac_connection_configure_services_mock, f_module)
+ assert msg == {'changes_applicable': False, 'Message': 'No changes found to commit!',
+ 'changed': False, 'Status': 'Success'}
+
+ def test_run_idrac_services_config_success_case05(self, idrac_connection_configure_services_mock,
+ idrac_default_args, idrac_file_manager_config_services_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "enable_web_server": None, "http_port": None,
+ "https_port": None, "timeout": None, "ssl_encryption": None,
+ "tls_protocol": None, "snmp_enable": None,
+ "community_name": None, "snmp_protocol": None, "alert_port": None,
+ "discovery_port": None, "trap_format": None,
+ "ipmi_lan": {"community_name": "public"}})
+ message = {"changes_applicable": False, "Message": "No changes found to commit!", "changed": False,
+ "Status": "Success"}
+ idrac_connection_configure_services_mock.config_mgr.configure_web_server.return_value = message
+ idrac_connection_configure_services_mock.config_mgr.configure_snmp.return_value = message
+ idrac_connection_configure_services_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_services_config(idrac_connection_configure_services_mock, f_module)
+ assert msg == {'changes_applicable': False, 'Message': 'No changes found to commit!',
+ 'changed': False, 'Status': 'Success'}
+
+ def test_run_idrac_services_config_failed_case01(self, idrac_connection_configure_services_mock,
+ idrac_default_args, idrac_file_manager_config_services_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "enable_web_server": "Enabled", "http_port": 443,
+ "https_port": 343, "timeout": 10, "ssl_encryption": "T_128_Bit_or_higher",
+ "tls_protocol": "TLS_1_1_and_Higher", "snmp_enable": "Enabled",
+ "community_name": "communityname", "snmp_protocol": "All", "alert_port": 445,
+ "discovery_port": 1000, "trap_format": "SNMPv1"})
+ message = {'Status': 'Failed', "Data": {'Message': 'status failed in checking Data'}}
+ idrac_connection_configure_services_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
+ idrac_connection_configure_services_mock.config_mgr.set_liason_share.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.run_idrac_services_config(idrac_connection_configure_services_mock, f_module)
+ assert ex.value.args[0] == 'status failed in checking Data'
+
+ def test_run_idrac_services_config_failed_case02(self, idrac_connection_configure_services_mock,
+ idrac_default_args, idrac_file_manager_config_services_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "enable_web_server": "Enabled", "http_port": 443,
+ "https_port": 343, "timeout": 10, "ssl_encryption": "T_128_Bit_or_higher",
+ "tls_protocol": "TLS_1_1_and_Higher", "snmp_enable": "Enabled",
+ "community_name": "communityname", "snmp_protocol": "All", "alert_port": 445,
+ "discovery_port": 1000, "trap_format": "SNMPv1",
+ "ipmi_lan": {"community_name": "public"}})
+ message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
+ "Status": "failed"}
+ idrac_connection_configure_services_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_services_config(idrac_connection_configure_services_mock, f_module)
+ assert msg == {'changes_applicable': False, 'Message': 'No changes were applied',
+ 'changed': False, 'Status': 'failed'}
+
+ def test_run_idrac_services_config_failed_case03(self, idrac_connection_configure_services_mock,
+ idrac_default_args, idrac_file_manager_config_services_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "enable_web_server": "Enabled", "http_port": 443,
+ "https_port": 343, "timeout": 10, "ssl_encryption": "T_128_Bit_or_higher",
+ "tls_protocol": "TLS_1_1_and_Higher", "snmp_enable": "Enabled",
+ "community_name": "communityname", "snmp_protocol": "All", "alert_port": 445,
+ "discovery_port": 1000, "trap_format": "SNMPv1"})
+        message = {'Status': 'Failed', "Data": {'Message': "Failed to find changes"}}
+ idrac_connection_configure_services_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
+ idrac_connection_configure_services_mock.config_mgr.set_liason_share.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.run_idrac_services_config(idrac_connection_configure_services_mock, f_module)
+        assert ex.value.args[0] == "Failed to find changes"
+
+ def test_main_idrac_configure_fail_case(self, mocker, idrac_default_args, idrac_connection_configure_services_mock,
+ idrac_file_manager_config_services_mock):
+ idrac_default_args.update({"share_name": None})
+ message = {'changed': False, 'msg': {'Status': "failed", "message": "No changes found to commit!"}}
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_configure_idrac_services.run_idrac_services_config', return_value=message)
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+
+ @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
+ def test_main_idrac_configure_services_exception_handling_case(self, exc_type, mocker, idrac_default_args,
+ idrac_connection_configure_services_mock,
+ idrac_file_manager_config_services_mock):
+ idrac_default_args.update({"share_name": None})
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_configure_idrac_services.run_idrac_services_config', side_effect=exc_type('test'))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert 'msg' in result
+ assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_firmware_inventory.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_firmware_inventory.py
new file mode 100644
index 00000000..657f89e4
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_firmware_inventory.py
@@ -0,0 +1,108 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_get_firmware_inventory
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, PropertyMock
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+
+class TestFirmware(FakeAnsibleModule):
+ module = dellemc_get_firmware_inventory
+
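+    # Mocked OMSDK handle: InstalledFirmware is exposed as a PropertyMock so each
+    # test can return canned inventory data or raise from the property access.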
+ @pytest.fixture
+ def idrac_firmware_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.update_mgr = idrac_obj
+ type(idrac_obj).InstalledFirmware = PropertyMock(return_value="msg")
+ return idrac_obj
+
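+    # Patch iDRACConnection so that entering the context manager in the module
+    # yields the mocked handle instead of opening a real iDRAC session.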
+ @pytest.fixture
+ def idrac_get_firmware_inventory_connection_mock(self, mocker, idrac_firmware_mock):
+ idrac_conn_class_mock = mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_get_firmware_inventory.iDRACConnection',
+ return_value=idrac_firmware_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_firmware_mock
+ return idrac_firmware_mock
+
+ def test_main_idrac_get_firmware_inventory_success_case01(self, idrac_get_firmware_inventory_connection_mock,
+ idrac_default_args):
+ idrac_get_firmware_inventory_connection_mock.update_mgr.InstalledFirmware.return_value = {"Status": "Success"}
+ result = self._run_module(idrac_default_args)
+ assert result == {'ansible_facts': {
+ idrac_get_firmware_inventory_connection_mock.ipaddr: {
+ 'Firmware Inventory': idrac_get_firmware_inventory_connection_mock.update_mgr.InstalledFirmware}},
+ "changed": False}
+
+ def test_run_get_firmware_inventory_success_case01(self, idrac_get_firmware_inventory_connection_mock,
+ idrac_default_args):
+ obj2 = MagicMock()
+ idrac_get_firmware_inventory_connection_mock.update_mgr = obj2
+ type(obj2).InstalledFirmware = PropertyMock(return_value="msg")
+ f_module = self.get_module_mock(params=idrac_default_args)
+ msg, err = self.module.run_get_firmware_inventory(idrac_get_firmware_inventory_connection_mock, f_module)
+ assert msg == {'failed': False,
+ 'msg': idrac_get_firmware_inventory_connection_mock.update_mgr.InstalledFirmware}
+ assert msg['failed'] is False
+ assert err is False
+
+ def test_run_get_firmware_inventory_failed_case01(self, idrac_get_firmware_inventory_connection_mock,
+ idrac_default_args):
+ f_module = self.get_module_mock(params=idrac_default_args)
+ error_msg = "Error in Runtime"
+ obj2 = MagicMock()
+ idrac_get_firmware_inventory_connection_mock.update_mgr = obj2
+ type(obj2).InstalledFirmware = PropertyMock(side_effect=Exception(error_msg))
+ msg, err = self.module.run_get_firmware_inventory(idrac_get_firmware_inventory_connection_mock, f_module)
+ assert msg['failed'] is True
+ assert msg['msg'] == "Error: {0}".format(error_msg)
+ assert err is True
+
+ def test_run_get_firmware_inventory_failed_case02(self, idrac_get_firmware_inventory_connection_mock,
+ idrac_default_args):
+ message = {'Status': "Failed", "Message": "Fetched..."}
+ obj2 = MagicMock()
+ idrac_get_firmware_inventory_connection_mock.update_mgr = obj2
+ type(obj2).InstalledFirmware = PropertyMock(return_value=message)
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.run_get_firmware_inventory(idrac_get_firmware_inventory_connection_mock, f_module)
+ assert result == ({'msg': {'Status': 'Failed', 'Message': 'Fetched...'}, 'failed': True}, False)
+ if "Status" in result[0]['msg']:
+ if not result[0]['msg']['Status'] == "Success":
+ assert result[0]['failed'] is True
+
+    def test_main_idrac_get_firmware_inventory_failed_case01(self, idrac_get_firmware_inventory_connection_mock,
+ idrac_default_args):
+ error_msg = "Error occurs"
+ obj2 = MagicMock()
+ idrac_get_firmware_inventory_connection_mock.update_mgr = obj2
+ type(obj2).InstalledFirmware = PropertyMock(side_effect=Exception(error_msg))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ assert result['msg'] == "Error: {0}".format(error_msg)
+
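+    # Any exception escaping run_get_firmware_inventory should surface as a failed
+    # module result that carries a msg key.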
+ @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
+ def test_main_idrac_get_firmware_inventory_exception_handling_case(self, exc_type, mocker,
+ idrac_get_firmware_inventory_connection_mock,
+ idrac_default_args):
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_get_firmware_inventory.'
+ 'run_get_firmware_inventory', side_effect=exc_type('test'))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert 'msg' in result
+ assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_system_inventory.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_system_inventory.py
new file mode 100644
index 00000000..c398c9f8
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_system_inventory.py
@@ -0,0 +1,75 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_get_system_inventory
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, Mock
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+
+class TestSystemInventory(FakeAnsibleModule):
+ module = dellemc_get_system_inventory
+
+ @pytest.fixture
+ def idrac_system_inventory_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.get_entityjson = idrac_obj
+ type(idrac_obj).get_json_device = Mock(return_value="msg")
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_get_system_inventory_connection_mock(self, mocker, idrac_system_inventory_mock):
+ idrac_conn_class_mock = mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_get_system_inventory.iDRACConnection',
+ return_value=idrac_system_inventory_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_system_inventory_mock
+ return idrac_system_inventory_mock
+
+ def test_main_idrac_get_system_inventory_success_case01(self, idrac_get_system_inventory_connection_mock, mocker,
+ idrac_default_args):
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_get_system_inventory.run_get_system_inventory',
+ return_value=({"msg": "Success"}, False))
+ msg = self._run_module(idrac_default_args)
+ assert msg['changed'] is False
+ assert msg['ansible_facts'] == {idrac_get_system_inventory_connection_mock.ipaddr:
+ {'SystemInventory': "Success"}}
+
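+    # Overriding get_json_device with a non-callable value drives
+    # run_get_system_inventory into its error handling, which must report failure.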
+ def test_run_get_system_inventory_error_case(self, idrac_get_system_inventory_connection_mock, idrac_default_args,
+ mocker):
+ f_module = self.get_module_mock()
+ idrac_get_system_inventory_connection_mock.get_json_device = {"msg": "Success"}
+ result, err = self.module.run_get_system_inventory(idrac_get_system_inventory_connection_mock, f_module)
+ assert result["failed"] is True
+ assert err is True
+
+ def test_main_error_case(self, idrac_get_system_inventory_connection_mock, idrac_default_args, mocker):
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_get_system_inventory.run_get_system_inventory',
+ return_value=({"msg": "Failed"}, True))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+
+ @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
+ def test_main_exception_handling_case(self, exc_type, mocker, idrac_default_args,
+ idrac_get_system_inventory_connection_mock):
+
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_get_system_inventory.run_get_system_inventory',
+ side_effect=exc_type('test'))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert 'msg' in result
+ assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py
new file mode 100644
index 00000000..1ae8b22c
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 6.0.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_idrac_lc_attributes
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+
+class TestLcAttributes(FakeAnsibleModule):
+ module = dellemc_idrac_lc_attributes
+
+ @pytest.fixture
+ def idrac_lc_attributes_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.file_share_manager = idrac_obj
+ omsdk_mock.config_mgr = idrac_obj
+ type(idrac_obj).create_share_obj = Mock(return_value="Status")
+ type(idrac_obj).set_liason_share = Mock(return_value="Status")
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_lc_attribute_mock(self, mocker, idrac_lc_attributes_mock):
+ idrac_conn_class_mock = mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_lc_attributes.iDRACConnection',
+ return_value=idrac_lc_attributes_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_lc_attributes_mock
+ return idrac_lc_attributes_mock
+
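+    # If the module does not expose file_share_manager, mocker.patch raises
+    # AttributeError and a bare MagicMock is used as the fixture instead.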
+ @pytest.fixture
+ def idrac_file_manager_lc_attribute_mock(self, mocker):
+ try:
+ file_manager_obj = mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_lc_attributes.file_share_manager')
+ except AttributeError:
+ file_manager_obj = MagicMock()
+ obj = MagicMock()
+ file_manager_obj.create_share_obj.return_value = obj
+ return file_manager_obj
+
+ def test_main_lc_attributes_success_case01(self, idrac_connection_lc_attribute_mock,
+ idrac_default_args, mocker, idrac_file_manager_lc_attribute_mock):
+ idrac_default_args.update({"share_name": None, 'share_password': None,
+ 'csior': 'Enabled', 'share_mnt': None, 'share_user': None})
+ message = {'changed': False, 'msg': {'Status': "Success", "message": "No changes found to commit!"}}
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_lc_attributes.run_setup_idrac_csior',
+ return_value=message)
+ with pytest.raises(Exception) as ex:
+ self._run_module(idrac_default_args)
+ assert ex.value.args[0]['msg'] == "Failed to configure the iDRAC LC attributes."
+ status_msg = {"Status": "Success"}
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_lc_attributes.run_setup_idrac_csior',
+ return_value=status_msg)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Successfully configured the iDRAC LC attributes."
+ status_msg = {"Status": "Success", "Message": "No changes were applied"}
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_lc_attributes.run_setup_idrac_csior',
+ return_value=status_msg)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "No changes were applied"
+
+ def test_run_setup_idrac_csior_success_case01(self, idrac_connection_lc_attribute_mock, idrac_default_args,
+ idrac_file_manager_lc_attribute_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "csior": "csior"})
+ message = {"changes_applicable": True, "message": "changes are applicable"}
+ idrac_connection_lc_attribute_mock.config_mgr.is_change_applicable.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ with pytest.raises(Exception) as ex:
+ self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ assert ex.value.args[0] == "Changes found to commit!"
+ status_msg = {"changes_applicable": False, "message": "no changes are applicable"}
+ idrac_connection_lc_attribute_mock.config_mgr.is_change_applicable.return_value = status_msg
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ with pytest.raises(Exception) as ex:
+ self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ assert ex.value.args[0] == "No changes found to commit!"
+
+ def test_run_setup_idrac_csior_success_case02(self, idrac_connection_lc_attribute_mock, idrac_default_args,
+ idrac_file_manager_lc_attribute_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "csior": "scr"})
+ message = {"changes_applicable": True, "message": "changes found to commit!", "changed": True,
+ "Status": "Success"}
+ idrac_connection_lc_attribute_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ assert msg == {'changes_applicable': True, 'message': 'changes found to commit!',
+ 'changed': True, 'Status': 'Success'}
+
+ def test_run_setup_idrac_csior_success_case03(self, idrac_connection_lc_attribute_mock, idrac_default_args,
+ idrac_file_manager_lc_attribute_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "csior": "scr"})
+ message = {"changes_applicable": True, "Message": "No changes found to commit!", "changed": False,
+ "Status": "Success"}
+ idrac_connection_lc_attribute_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ assert msg == {'changes_applicable': True, 'Message': 'No changes found to commit!',
+ 'changed': False, 'Status': 'Success'}
+
+ def test_run_setup_csior_disable_case(self, idrac_connection_lc_attribute_mock, idrac_default_args,
+ idrac_file_manager_lc_attribute_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "csior": 'Disabled'})
+ message = {"changes_applicable": True}
+ obj = MagicMock()
+ idrac_connection_lc_attribute_mock.config_mgr = obj
+ type(obj).disable_csior = Mock(return_value=message)
+ idrac_connection_lc_attribute_mock.config_mgr.is_change_applicable.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ with pytest.raises(Exception) as ex:
+ self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ assert ex.value.args[0] == "Changes found to commit!"
+
+ def test_run_setup_csior_enable_case(self, idrac_connection_lc_attribute_mock, idrac_default_args,
+ idrac_file_manager_lc_attribute_mock):
+ idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
+ "share_password": "sharepassword", "csior": 'Enabled'})
+ message = {"changes_applicable": True}
+ obj = MagicMock()
+ idrac_connection_lc_attribute_mock.config_mgr = obj
+ type(obj).enable_csior = Mock(return_value='Enabled')
+ idrac_connection_lc_attribute_mock.config_mgr.is_change_applicable.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ with pytest.raises(Exception) as ex:
+ self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ assert ex.value.args[0] == "Changes found to commit!"
+
+ def test_run_setup_csior_failed_case01(self, idrac_connection_lc_attribute_mock, idrac_default_args,
+ idrac_file_manager_lc_attribute_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "csior": "csior"})
+ message = {'Status': 'Failed', "Data": {'Message': 'status failed in checking Data'}}
+ idrac_connection_lc_attribute_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
+ idrac_connection_lc_attribute_mock.config_mgr.set_liason_share.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ assert ex.value.args[0] == "status failed in checking Data"
+
+ def test_run_setup_idrac_csior_failed_case03(self, idrac_connection_lc_attribute_mock, idrac_default_args,
+ idrac_file_manager_lc_attribute_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "csior": "scr"})
+        message = {"changes_applicable": False, "Message": "Failed to find changes", "changed": False,
+ "Status": "Failed", "failed": True}
+ idrac_connection_lc_attribute_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+        assert msg == {'changes_applicable': False, 'Message': 'Failed to find changes',
+ 'changed': False, 'Status': 'Failed', "failed": True}
+ assert msg['changed'] is False
+ assert msg['failed'] is True
+
+ @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
+ def test_main_lc_attribute_exception_handling_case(self, exc_type, mocker, idrac_connection_lc_attribute_mock,
+ idrac_default_args, idrac_file_manager_lc_attribute_mock):
+ idrac_default_args.update({"share_name": None, 'share_password': None,
+ 'csior': 'Enabled', 'share_mnt': None, 'share_user': None})
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_lc_attributes.run_setup_idrac_csior',
+ side_effect=exc_type('test'))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert 'msg' in result
+ assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py
new file mode 100644
index 00000000..c3a0dff1
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py
@@ -0,0 +1,437 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import os
+from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_idrac_storage_volume
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+
+class TestStorageVolume(FakeAnsibleModule):
+ module = dellemc_idrac_storage_volume
+
+ @pytest.fixture
+ def idrac_storage_volume_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.file_share_manager = idrac_obj
+ omsdk_mock.config_mgr = idrac_obj
+ type(idrac_obj).create_share_obj = Mock(return_value="servicesstatus")
+ type(idrac_obj).set_liason_share = Mock(return_value="servicestatus")
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_storage_volume_mock(self, mocker, idrac_storage_volume_mock):
+ idrac_conn_class_mock = mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.iDRACConnection',
+ return_value=idrac_storage_volume_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_storage_volume_mock
+ return idrac_storage_volume_mock
+
+ @pytest.fixture
+ def idrac_file_manager_storage_volume_mock(self, mocker):
+ try:
+ file_manager_obj = mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_storage_volume.file_share_manager')
+ except AttributeError:
+ file_manager_obj = MagicMock()
+ obj = MagicMock()
+ file_manager_obj.create_share_obj.return_value = obj
+ return file_manager_obj
+
+    def test_main_idrac_storage_volume_success_case(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"disk_cache_policy": "Default", "capacity": 12.4, "media_type": "HDD",
+ "number_dedicated_hot_spare": 1, "protocol": "SAS", "raid_init_operation": "None",
+ "raid_reset_config": True, "read_cache_policy": "ReadAhead", "span_depth": 4,
+ "span_length": 3, "state": "create", "stripe_size": 2, "volume_type": "RAID 0",
+ "write_cache_policy": "WriteThrough"})
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume._validate_options', return_value='state')
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.run_server_raid_config', return_value={"changes_applicable": True})
+ msg = self._run_module(idrac_default_args)
+ assert msg == {'changed': True, 'msg': 'Successfully completed the create storage volume operation',
+ 'storage_status': {'changes_applicable': True}}
+ assert msg["msg"] == "Successfully completed the {0} storage volume operation".format("create")
+
+    def test_main_idrac_storage_volume_fail_case01(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"disk_cache_policy": "Default", "capacity": 12.4, "media_type": "HDD",
+ "number_dedicated_hot_spare": 1, "protocol": "SAS", "raid_init_operation": "None",
+ "raid_reset_config": True, "read_cache_policy": "ReadAhead", "span_depth": 4,
+ "span_length": 3, "state": "create", "stripe_size": 2, "volume_type": "RAID 0",
+ "write_cache_policy": "WriteThrough"})
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume._validate_options', return_value='state')
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+                     'dellemc_idrac_storage_volume.run_server_raid_config', return_value={"storage_status": "present"})
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result == {'failed': True, 'msg': 'Failed to perform storage operation'}
+
+ def test_main_idrac_storage_volume_success_case01(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"disk_cache_policy": "Default", "capacity": 12.4, "media_type": "HDD",
+ "number_dedicated_hot_spare": 1, "protocol": "SAS", "raid_init_operation": "None",
+ "raid_reset_config": True, "read_cache_policy": "ReadAhead", "span_depth": 4,
+ "span_length": 3, "state": "create", "stripe_size": 2, "volume_type": "RAID 0",
+ "write_cache_policy": "WriteThrough"})
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume._validate_options', return_value='state')
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.run_server_raid_config', return_value={"Status": "Success",
+ "changed": True})
+ msg = self._run_module(idrac_default_args)
+ assert msg == {'changed': True, 'msg': 'Successfully completed the create storage volume operation',
+ 'storage_status': {'Status': 'Success', 'changed': True}}
+
+ def test_main_idrac_storage_volume_success_case02(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"disk_cache_policy": "Default", "capacity": 12.4, "media_type": "HDD",
+ "number_dedicated_hot_spare": 1, "protocol": "SAS", "raid_init_operation": "None",
+ "raid_reset_config": True, "read_cache_policy": "ReadAhead", "span_depth": 4,
+ "span_length": 3, "state": "create", "stripe_size": 2, "volume_type": "RAID 0",
+ "write_cache_policy": "WriteThrough"})
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume._validate_options', return_value='state')
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.run_server_raid_config',
+ return_value={"Status": "Success", "changed": False, "Message": "No changes found to commit!"})
+ msg = self._run_module(idrac_default_args)
+ assert msg == {'changed': False, 'msg': 'No changes found to commit!',
+ 'storage_status': {'Message': 'No changes found to commit!',
+ 'Status': 'Success',
+ 'changed': False}}
+
+ def test_main_idrac_storage_volume_success_case03(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"disk_cache_policy": "Default", "capacity": 12.4,
+ "media_type": "HDD",
+ "number_dedicated_hot_spare": 1, "protocol": "SAS", "raid_init_operation": "None",
+ "raid_reset_config": True, "read_cache_policy": "ReadAhead", "span_depth": 4,
+ "span_length": 3, "state": "create", "stripe_size": 2, "volume_type": "RAID 0",
+ "write_cache_policy": "WriteThrough"})
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume._validate_options', return_value='state')
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.run_server_raid_config',
+ return_value={"Status": "Success", "changed": True, "Message": "Nooo changes found to commit!"})
+ msg = self._run_module(idrac_default_args)
+ assert msg['msg'] == "Successfully completed the create storage volume operation"
+
+ @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError, TypeError])
+ def test_main_idrac_storage_volume_exception_handling_case(self, exc_type, mocker,
+ idrac_connection_storage_volume_mock,
+ idrac_default_args):
+ idrac_default_args.update({"share_name": "sharename"})
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume._validate_options', side_effect=exc_type('test'))
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.run_server_raid_config', side_effect=exc_type('test'))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert 'msg' in result
+ assert result['failed'] is True
+
+ def test_run_server_raid_config_create_success_case(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"share_name": "sharename", "state": "create"})
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.view_storage', return_value="view")
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.create_storage', return_value="create")
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.delete_storage', return_value="delete")
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.run_server_raid_config(idrac_connection_storage_volume_mock, f_module)
+ assert result == 'create'
+
+ def test_run_server_raid_config_view_success_case(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"share_name": "sharename", "state": "view"})
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.view_storage', return_value="view")
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.create_storage', return_value="create")
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.delete_storage', return_value="delete")
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.run_server_raid_config(idrac_connection_storage_volume_mock, f_module)
+ assert result == 'view'
+
+ def test_run_server_raid_config_delete_success_case(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"share_name": "sharename", "state": "delete"})
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.view_storage', return_value="view")
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.create_storage', return_value="create")
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_idrac_storage_volume.delete_storage', return_value="delete")
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.run_server_raid_config(idrac_connection_storage_volume_mock, f_module)
+ assert result == 'delete'
+
+ def test_validate_options_controller_id_error_case(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"share_name": "sharename", "state": "create", "controller_id": ""})
+ with pytest.raises(ValueError) as ex:
+ self.module._validate_options(idrac_default_args)
+ assert "Controller ID is required." == str(ex.value)
+
+ def test_validate_options_capacity_error_case(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"share_name": "sharename", "state": "create", "controller_id": "XYZ123",
+ "capacity": -1.4})
+ mocker.patch("ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_storage_volume."
+ "error_handling_for_negative_num", return_value=("capacity", -3.4))
+ with pytest.raises(ValueError) as ex:
+ self.module._validate_options(idrac_default_args)
+ assert str(("capacity", -3.4)) == str(ex.value)
+
+ def test_validate_options_strip_size_error_case(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"share_name": "sharename", "state": "create", "controller_id": "XYZ123",
+ "capacity": 1.4, "stripe_size": -1})
+ mocker.patch("ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_storage_volume."
+ "error_handling_for_negative_num", return_value=("stripe_size", -1))
+ with pytest.raises(ValueError) as ex:
+ self.module._validate_options(idrac_default_args)
+ assert str(("stripe_size", -1)) == str(ex.value)
+
+ def test_validate_options_volume_error_case01(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"share_name": "sharename", "state": "create", "controller_id": "XYZ123",
+ "capacity": 1.4, "stripe_size": 1, "volumes": [{"drives": {"id": ["data"],
+ "location":[1]}}]})
+ with pytest.raises(ValueError) as ex:
+ self.module._validate_options(idrac_default_args)
+ assert "Either {0} or {1} is allowed".format("id", "location") == str(ex.value)
+
+ def test_validate_options_volume_error_case02(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"share_name": "sharename", "state": "create", "controller_id": "XYZ123",
+ "capacity": 1.4, "stripe_size": 1, "volumes": [{"drives": {}}]})
+ with pytest.raises(ValueError) as ex:
+ self.module._validate_options(idrac_default_args)
+ assert "Drives must be defined for volume creation." == str(ex.value)
+
+ def test_validate_create_success_case(self, idrac_connection_storage_volume_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "sharename", "state": "create", "controller_id": "XYZ123",
+ "capacity": 1.4, "stripe_size": 1,
+ "volumes": [{"drives": {'data': ""}}]})
+ with pytest.raises(ValueError) as ex:
+ self.module._validate_options(idrac_default_args)
+ assert "Either {0} or {1} should be specified".format("id", "location") == str(ex.value)
+
+ def test_validate_create_success_case_volumes_stripe_size(self, idrac_connection_storage_volume_mock,
+ idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "sharename", "state": "create", "controller_id": "XYZ123",
+ "capacity": 1.4, "stripe_size": 1,
+ "volumes": [{"drives": {'location': [1]}, "stripe_size": -1}]})
+ mocker.patch("ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_storage_volume."
+ "error_handling_for_negative_num", return_value=("stripe_size", -1))
+ with pytest.raises(ValueError) as ex:
+ self.module._validate_options(idrac_default_args)
+ assert str(("stripe_size", -1)) == str(ex.value)
+
+ def test_validate_create_success_case_volumes_capacity(self, idrac_connection_storage_volume_mock,
+ idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "sharename", "state": "create", "controller_id": "XYZ123",
+ "capacity": 1.4, "stripe_size": 1,
+ "volumes": [{"drives": {'location': [0]}, "capacity": -1.1}]})
+ mocker.patch("ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_storage_volume."
+ "error_handling_for_negative_num", return_value=("capacity", -1.1))
+ with pytest.raises(ValueError) as ex:
+ self.module._validate_options(idrac_default_args)
+ assert str(("capacity", -1.1)) == str(ex.value)
+
+ def test_validate_option_delete_success_case01(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"share_name": "sharename", "state": "delete", "controller_id": "XYZ123",
+ "capacity": 1.4, "stripe_size": 1,
+ "volumes": {"drives": {"Id": "", "location": ""}, "capacity": 1.4,
+ "stripe_size": 1}})
+ with pytest.raises(ValueError) as ex:
+ self.module._validate_options(idrac_default_args)
+ assert "Virtual disk name is a required parameter for remove virtual disk operations." == str(ex.value)
+
+ def test_validate_option_delete_success_case02(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"share_name": "sharename", "state": "delete", "controller_id": "XYZ123",
+ "capacity": 1.4, "stripe_size": 1,
+ "volumes": None})
+ with pytest.raises(ValueError) as ex:
+ self.module._validate_options(idrac_default_args)
+ assert "Virtual disk name is a required parameter for remove virtual disk operations." == str(ex.value)
+
+ def test_error_handling_for_negative_num(self, idrac_connection_storage_volume_mock, idrac_default_args):
+ msg = self.module.error_handling_for_negative_num("capacity", -1.0)
+ assert msg == "{0} cannot be a negative number or zero,got {1}".format("capacity", -1.0)
+
+ def test_set_liason_share_success_case(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ idrac_file_manager_storage_volume_mock):
+ idrac_default_args.update({"share_name": "sharename", "state": "delete", "share_path": "sharpath"})
+ message = {"Status": 'Failed', "Data": {'Message': "Failed to set Liason share"}}
+ obj = MagicMock()
+ idrac_connection_storage_volume_mock.tempfile.gettempdir() + os.sep
+ idrac_connection_storage_volume_mock.file_share_manager.create_share_obj.return_value = message
+ idrac_connection_storage_volume_mock.config_mgr = obj
+ obj.set_liason_share = Mock(return_value=message)
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.set_liason_share(idrac_connection_storage_volume_mock, f_module)
+ assert "Failed to set Liason share" == str(ex.value)
+
+ def test_view_storage_success_case(self, idrac_connection_storage_volume_mock, idrac_default_args):
+ idrac_default_args.update({"controller_id": "controller", "volume_id": "virtual_disk"})
+ msg = {"Status": "Success"}
+ obj = MagicMock()
+ idrac_connection_storage_volume_mock.config_mgr.RaidHelper = obj
+ obj.view_storage = Mock(return_value=msg)
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.view_storage(idrac_connection_storage_volume_mock, f_module)
+ assert result == {"Status": "Success"}
+
+ def test_view_storage_failed_case(self, idrac_connection_storage_volume_mock, idrac_default_args):
+ idrac_default_args.update({"controller_id": "controller", "volume_id": "virtual_disk"})
+ msg = {"Status": "Failed", "msg": "Failed to fetch storage details"}
+ obj = MagicMock()
+ idrac_connection_storage_volume_mock.config_mgr.RaidHelper = obj
+ obj.view_storage = Mock(return_value=msg)
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.view_storage(idrac_connection_storage_volume_mock, f_module)
+ assert "Failed to fetch storage details" == str(ex.value)
+
+ def test_delete_storage_case(self, idrac_connection_storage_volume_mock, idrac_default_args):
+ idrac_default_args.update({"volumes": [{"name": "nameofvolume"}]})
+ msg = {"Status": "Success"}
+ obj = MagicMock()
+ idrac_connection_storage_volume_mock.config_mgr.RaidHelper = obj
+ obj.delete_virtual_disk = Mock(return_value=msg)
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.delete_storage(idrac_connection_storage_volume_mock, f_module)
+ assert result == {"Status": "Success"}
+
+ def test_create_storage_success_case01(self, idrac_connection_storage_volume_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"volumes": {"name": "volume1"}, "controller_id": "x56y"})
+ mocker.patch("ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_storage_volume."
+ "multiple_vd_config", return_value={"name": "volume1", "stripe_size": 1.3})
+ obj = MagicMock()
+ idrac_connection_storage_volume_mock.config_mgr.RaidHelper = obj
+ obj.new_virtual_disk = Mock(return_value=[{"name": "volume1", "stripe_size": 1.3}])
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.create_storage(idrac_connection_storage_volume_mock, f_module)
+ assert result == [{'name': 'volume1', 'stripe_size': 1.3}]
+
+ def test_create_storage_success_case02(self, idrac_connection_storage_volume_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"volumes": None, "controller_id": "x56y"})
+ mocker.patch("ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_storage_volume."
+ "multiple_vd_config", return_value={"name": "volume1", "stripe_size": 1.3})
+ obj = MagicMock()
+ idrac_connection_storage_volume_mock.config_mgr.RaidHelper = obj
+ obj.new_virtual_disk = Mock(return_value=[{"name": "volume1", "stripe_size": 1.3}])
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.create_storage(idrac_connection_storage_volume_mock, f_module)
+ assert result == [{'name': 'volume1', 'stripe_size': 1.3}]
+
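+    # The multiple_vd_config tests below check that per-volume settings take precedence
+    # over the module-level defaults and are mapped to the OMSDK virtual-disk keys
+    # asserted on (mediatype, Name, StripeSize).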
+ def test_multiple_vd_config_success_case(self, idrac_connection_storage_volume_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"name": "name1", "media_type": 'HDD', "protocol": "SAS", "drives": None,
+ "capacity": 2, "raid_init_operation": 'Fast', 'raid_reset_config': True,
+ "span_depth": 1, "span_length": 1, "number_dedicated_hot_spare": 0,
+ "volume_type": 'RAID 0', "disk_cache_policy": "Default",
+ "write_cache_policy": "WriteThrough", "read_cache_policy": "NoReadAhead",
+ "stripe_size": 64 * 1024})
+ result = self.module.multiple_vd_config({'name': 'volume1', 'stripe_size': 1.3, "capacity": 1,
+ "drives": {"id": "id", "location": "location"}}, "",
+ {"media_type": "HDD", "protocol": "NAS", "raid_init_operation": "Fast",
+ 'raid_reset_config': True, "span_depth": 1, "span_length": 1,
+ "number_dedicated_hot_spare": 0, "volume_type": 'RAID 0',
+ "disk_cache_policy": "Default", "write_cache_policy": "WriteThrough",
+ "read_cache_policy": "NoReadAhead", "stripe_size": 64 * 1024})
+ assert result["mediatype"] == 'HDD'
+
+ def test_multiple_vd_config_capacity_none_case(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"name": "name1", "media_type": 'HDD', "protocol": "SAS", "drives": {"id": ["id1"],
+ "location": [1]},
+ "capacity": None, "raid_init_operation": 'Fast', 'raid_reset_config': True,
+ "span_depth": 1, "span_length": 1, "number_dedicated_hot_spare": 0,
+ "volume_type": 'RAID 0', "disk_cache_policy": "Default", "stripe_size": 64 * 1024,
+ "write_cache_policy": "WriteThrough", "read_cache_policy": "NoReadAhead"})
+ result = self.module.multiple_vd_config({"media_type": 'HDD', "protocol": "SAS", "drives": None,
+ "capacity": 2, "raid_init_operation": 'Fast',
+ 'raid_reset_config': True, "span_depth": 1, "span_length": 1,
+ "number_dedicated_hot_spare": 0, "volume_type": 'RAID 0',
+ "disk_cache_policy": "Default", "stripe_size": 64 * 1024,
+ "write_cache_policy": "WriteThrough",
+ "read_cache_policy": "NoReadAhead"}, "", {"protocol": "SAS"})
+ assert result["mediatype"] == "HDD"
+
+ def test_multiple_vd_config_capacity_none_case02(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"name": "name1", "media_type": None, "protocol": "SAS", "drives": {"id": ["id1"]},
+ "capacity": None, "raid_init_operation": None, 'raid_reset_config': True,
+ "span_depth": 1, "span_length": 1, "number_dedicated_hot_spare": 0,
+ "volume_type": 'RAID 0', "disk_cache_policy": "Default", "stripe_size": 64 * 1024,
+ "write_cache_policy": "WriteThrough", "read_cache_policy": "NoReadAhead"})
+ result = self.module.multiple_vd_config({'name': 'volume1', 'stripe_size': 1.3, "capacity": 1,
+ "drives": {"id": ["id"]}}, "",
+ {"media_type": None, "protocol": "SAS", "raid_init_operation": None,
+ 'raid_reset_config': True, "span_depth": 1, "span_length": 1,
+ "number_dedicated_hot_spare": 0, "volume_type": 'RAID 0',
+ "disk_cache_policy": "Default", "write_cache_policy": "WriteThrough",
+ "read_cache_policy": "NoReadAhead", "stripe_size": 64 * 1024})
+ assert result['Name'] == 'volume1'
+
+ def test_multiple_vd_config_capacity_none_case1(self, idrac_connection_storage_volume_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"name": "name1", "media_type": 'HDD', "protocol": "SAS", "drives": {"id": ["id1"]},
+ "capacity": None, "raid_init_operation": None, 'raid_reset_config': False,
+ "span_depth": 1, "span_length": 1, "number_dedicated_hot_spare": 0,
+ "volume_type": 'RAID 0', "disk_cache_policy": "Default", "stripe_size": 64 * 1024,
+ "write_cache_policy": "WriteThrough", "read_cache_policy": "NoReadAhead"})
+ result = self.module.multiple_vd_config({"media_type": 'HDD', "protocol": "SAS", "drives": None,
+ "capacity": None, "raid_init_operation": None,
+ 'raid_reset_config': False, "span_depth": 1, "span_length": 1,
+ "number_dedicated_hot_spare": 0, "volume_type": 'RAID 0',
+ "disk_cache_policy": "Default", "stripe_size": 64 * 1024,
+ "write_cache_policy": "WriteThrough",
+ "read_cache_policy": "NoReadAhead"}, "", {"protocol": "NAS"})
+ assert result["StripeSize"] == 65536
+
+ def test_multiple_vd_config_success_case02(self, idrac_connection_storage_volume_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"name": "name1", "media_type": 'HDD', "protocol": "SAS", "drives": None,
+ "capacity": 2, "raid_init_operation": 'Fast', 'raid_reset_config': True,
+ "span_depth": 1, "span_length": 1, "number_dedicated_hot_spare": 0,
+ "volume_type": 'RAID 0', "disk_cache_policy": "Default",
+ "write_cache_policy": "WriteThrough", "read_cache_policy": "NoReadAhead",
+ "stripe_size": 64 * 1024})
+ result = self.module.multiple_vd_config({'name': 'volume1', "capacity": 1,
+ "media_type": None, "protocol": None,
+ "raid_init_operation": "Fast",
+ 'raid_reset_config': False, "span_depth": 1, "span_length": 1,
+ "number_dedicated_hot_spare": 0, "volume_type": 'RAID 0',
+ "disk_cache_policy": "Default", "stripe_size": 64 * 1024,
+ "write_cache_policy": "WriteThrough",
+ "read_cache_policy": "NoReadAhead"}, "", {})
+ assert result["StripeSize"] == 65536
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py
new file mode 100644
index 00000000..768c62bf
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py
@@ -0,0 +1,126 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 6.0.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_system_lockdown_mode
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, Mock
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+
+class TestSystemLockdownMode(FakeAnsibleModule):
+ module = dellemc_system_lockdown_mode
+
+ @pytest.fixture
+ def idrac_system_lockdown_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.file_share_manager = idrac_obj
+ omsdk_mock.config_mgr = idrac_obj
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_file_manager_system_lockdown_mock(self, mocker):
+ try:
+ file_manager_obj = mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.dellemc_system_lockdown_mode.file_share_manager')
+ except AttributeError:
+ file_manager_obj = MagicMock()
+ obj = MagicMock()
+ file_manager_obj.create_share_obj.return_value = obj
+ return file_manager_obj
+
+ @pytest.fixture
+ def idrac_connection_system_lockdown_mode_mock(self, mocker, idrac_system_lockdown_mock):
+ idrac_conn_class_mock = mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
+ 'dellemc_system_lockdown_mode.iDRACConnection',
+ return_value=idrac_system_lockdown_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_system_lockdown_mock
+ return idrac_system_lockdown_mock
+
+ def test_main_system_lockdown_mode_success_case01(self, idrac_connection_system_lockdown_mode_mock, mocker,
+ idrac_file_manager_system_lockdown_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": None, "share_password": None,
+ "lockdown_mode": "Enabled"})
+ message = {"Status": "Success", "msg": "Lockdown mode of the system is configured.",
+ "changed": True, "system_lockdown_status": {"Status": "Success"}}
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_system_lockdown_mode.run_system_lockdown_mode',
+ return_value=message)
+ idrac_connection_system_lockdown_mode_mock.config_mgr.set_liason_share.return_value = message
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Lockdown mode of the system is configured."
+
+ def test_main_system_lockdown_mode_fail_case(self, idrac_connection_system_lockdown_mode_mock, mocker,
+ idrac_file_manager_system_lockdown_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": None, "share_password": None,
+ "lockdown_mode": "Enabled"})
+ message = {"Status": "Failed", "msg": "Failed to complete the lockdown mode operations.",
+ "system_lockdown_status": {}, "failed": True, "changed": False}
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_system_lockdown_mode.run_system_lockdown_mode',
+ return_value=message)
+ idrac_connection_system_lockdown_mode_mock.config_mgr.set_liason_share.return_value = message
+ with pytest.raises(Exception) as ex:
+ self._run_module_with_fail_json(idrac_default_args)
+ assert ex.value.args[0]['msg'] == "Failed to complete the lockdown mode operations."
+
+ @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
+ def test_main_exception_handling_case(self, exc_type, mocker, idrac_connection_system_lockdown_mode_mock,
+ idrac_file_manager_system_lockdown_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": None, "share_password": None,
+ "lockdown_mode": "Enabled"})
+ idrac_connection_system_lockdown_mode_mock.config_mgr.set_liason_share.return_value = {"Status": "Failed"}
+ mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_system_lockdown_mode.run_system_lockdown_mode',
+ side_effect=exc_type('test'))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert 'msg' in result
+ assert result['failed'] is True
+
+ def test_run_system_lockdown_mode_success_case01(self, idrac_connection_system_lockdown_mode_mock, mocker,
+ idrac_file_manager_system_lockdown_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": None, "share_password": None,
+ "lockdown_mode": "Enabled", "share_mnt": None, "share_user": None})
+ message = {"Status": "Success", "msg": "Lockdown mode of the system is configured.",
+ "changed": True, "system_lockdown_status": {"Status": "Success"}}
+ idrac_connection_system_lockdown_mode_mock.config_mgr.set_liason_share.return_value = message
+ idrac_connection_system_lockdown_mode_mock.config_mgr.enable_system_lockdown.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ msg = self.module.run_system_lockdown_mode(idrac_connection_system_lockdown_mode_mock, f_module)
+ assert msg['msg'] == "Successfully completed the lockdown mode operations."
+
+ def test_run_system_lockdown_mode_failed_case01(self, idrac_connection_system_lockdown_mode_mock, mocker,
+ idrac_file_manager_system_lockdown_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": None, "share_password": None,
+ "lockdown_mode": "Disabled", "share_mnt": None, "share_user": None})
+ message = {"Status": "failed"}
+ idrac_connection_system_lockdown_mode_mock.config_mgr.set_liason_share.return_value = message
+ idrac_connection_system_lockdown_mode_mock.config_mgr.disable_system_lockdown.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.run_system_lockdown_mode(idrac_connection_system_lockdown_mode_mock, f_module)
+ assert ex.value.args[0] == 'Failed to complete the lockdown mode operations.'
+
+ def test_run_system_lockdown_mode_failed_case02(self, idrac_connection_system_lockdown_mode_mock, mocker,
+ idrac_file_manager_system_lockdown_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": None, "share_password": None,
+ "lockdown_mode": "Enabled", "share_mnt": None, "share_user": None})
+ message = {"Status": "Failed", "Message": "message inside data"}
+ idrac_connection_system_lockdown_mode_mock.config_mgr.set_liason_share.return_value = message
+ idrac_connection_system_lockdown_mode_mock.config_mgr.enable_system_lockdown.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.run_system_lockdown_mode(idrac_connection_system_lockdown_mode_mock, f_module)
+ assert ex.value.args[0] == "message inside data"
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_attributes.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_attributes.py
new file mode 100644
index 00000000..d5c22523
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_attributes.py
@@ -0,0 +1,307 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 6.0.0
+# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import os
+import tempfile
+from io import StringIO
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_attributes
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock
+
+SUCCESS_MSG = "Successfully updated the attributes."
+NO_CHANGES_MSG = "No changes found to be applied."
+CHANGES_MSG = "Changes found to be applied."
+SYSTEM_ID = "System.Embedded.1"
+MANAGER_ID = "iDRAC.Embedded.1"
+LC_ID = "LifecycleController.Embedded.1"
+IDRAC_URI = "/redfish/v1/Managers/{res_id}/Oem/Dell/DellAttributes/{attr_id}"
+MANAGERS_URI = "/redfish/v1/Managers"
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.idrac_attributes.'
+UTILS_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.utils.'
+
+
+@pytest.fixture
+def idrac_redfish_mock_for_attr(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestIdracAttributes(FakeAnsibleModule):
+ module = idrac_attributes
+
+ @pytest.fixture
+ def idrac_attributes_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_attributes_mock(self, mocker, idrac_attributes_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
+ return_value=idrac_attributes_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_attributes_mock
+ return idrac_conn_mock
+
+ @pytest.mark.parametrize("params", [{"id": "iDRAC.Embedded.1", "attr": {'SNMP.1.AgentCommunity': 'Disabled'},
+ "uri_dict":
+ {"iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
+ "System.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1",
+ "LifecycleController.Embedded.1":
+ "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1"},
+ "response_attr": {"SNMP.1.AgentCommunity": "Disabled"}}])
+ def test_get_response_attr(self, params, idrac_redfish_mock_for_attr, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ diff, response_attr = self.module.get_response_attr(idrac_redfish_mock_for_attr, params["id"], params["attr"], params["uri_dict"])
+ assert response_attr.keys() == params["response_attr"].keys()
+
+ @pytest.mark.parametrize("params", [{"res_id": "iDRAC.Embedded.1", "attr": {'SNMP.1.AgentCommunity': 'public'},
+ "uri_dict": {
+ "iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
+ "System.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1",
+ "LifecycleController.Embedded.1":
+ "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1"},
+ "response_attr": {"SNMP.1.AgentCommunity": "public"},
+ "mparams": {'idrac_attributes': {"SNMP.1.AgentCommunity": "public"}
+ }
+ }])
+ def _test_fetch_idrac_uri_attr(self, params, idrac_redfish_mock_for_attr, idrac_default_args):
+ idrac_default_args.update(params.get('mparams'))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ diff, uri_dict, idrac_response_attr, system_response_attr, lc_response_attr =\
+ self.module.fetch_idrac_uri_attr(idrac_redfish_mock_for_attr, f_module, params["res_id"])
+ assert idrac_response_attr.keys() == params["response_attr"].keys()
+
+ @pytest.mark.parametrize("params", [{"res_id": "iDRAC.Embedded.1", "attr": {'SNMP.1.AgentCommunity': 'Disabled'},
+ "uri_dict": {
+ "iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
+ "System.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1",
+ "LifecycleController.Embedded.1":
+ "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1"},
+ "response_attr": {"ThermalSettings.1.ThermalProfile": "Sound Cap"},
+ "mparams": {'system_attributes': {"ThermalSettings.1.ThermalProfile": "Sound Cap"}
+ }}])
+    def _test_fetch_idrac_uri_attr_success_case01(self, params, idrac_redfish_mock_for_attr, idrac_default_args):
+ idrac_default_args.update(params.get('mparams'))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ diff, uri_dict, idrac_response_attr, system_response_attr, lc_response_attr = self.module.fetch_idrac_uri_attr(
+ idrac_redfish_mock_for_attr, f_module, params["res_id"])
+ assert system_response_attr.keys() == params["response_attr"].keys()
+
+ @pytest.mark.parametrize("params", [{"res_id": "iDRAC.Embedded.1", "attr": {'SNMP.1.AgentCommunity': 'Disabled'},
+ "uri_dict": {
+ "iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
+ "System.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1",
+ "LifecycleController.Embedded.1":
+ "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1"},
+ "response_attr": {"LCAttributes.1.AutoUpdate": "Enabled"},
+ "mparams": {'lifecycle_controller_attributes': {"LCAttributes.1.AutoUpdate": "Enabled"}
+ }}])
+    def _test_fetch_idrac_uri_attr_success_case02(self, params, idrac_redfish_mock_for_attr, idrac_default_args):
+ idrac_default_args.update(params.get('mparams'))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ diff, uri_dict, idrac_response_attr, system_response_attr, lc_response_attr = self.module.fetch_idrac_uri_attr(
+ idrac_redfish_mock_for_attr, f_module, params["res_id"])
+ assert lc_response_attr.keys() == params["response_attr"].keys()
+
+ @pytest.mark.parametrize("params", [{"res_id": "iDRAC.Embedded.1", "attr": {'SNMP.1.AgentCommunity': 'Disabled'},
+ "uri_dict": {
+ "iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
+ "System.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1",
+ "LifecycleController.Embedded.1":
+ "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1"},
+ "response_attr": {"SNMP.1.AgentCommunity": "Disabled"},
+ "mparams": {'idrac_attributes': {"SNMP.1.AgentCommunity": "Enabled"}
+ },
+ "system_response_attr": {},
+ "lc_response_attr": {},
+ "resp": {
+ "iDRAC": {
+ "@Message.ExtendedInfo": [
+ {
+ "Message": "The request completed successfully.",
+ "MessageArgs": [],
+ "MessageArgs@odata.count": 0,
+ "MessageId": "Base.1.12.Success",
+ "RelatedProperties": [],
+ "RelatedProperties@odata.count": 0,
+ "Resolution": "None",
+ "Severity": "OK"
+ },
+ {
+ "Message": "The operation successfully completed.",
+ "MessageArgs": [],
+ "MessageArgs@odata.count": 0,
+ "MessageId": "IDRAC.2.7.SYS413",
+ "RelatedProperties": [],
+ "RelatedProperties@odata.count": 0,
+ "Resolution": "No response action is required.",
+ "Severity": "Informational"
+ }
+ ]
+ }
+ }}])
+ def test_update_idrac_attributes(self, params, idrac_redfish_mock_for_attr, idrac_default_args):
+ idrac_default_args.update(params.get('mparams'))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ resp = self.module.update_idrac_attributes(idrac_redfish_mock_for_attr, f_module, params["uri_dict"],
+ params["response_attr"], params["system_response_attr"],
+ params["lc_response_attr"])
+ assert resp.keys() == params["resp"].keys()
+
+ @pytest.mark.parametrize("params", [{"res_id": "iDRAC.Embedded.1",
+ "uri_dict": {
+ "iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
+ "System.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1",
+ "LifecycleController.Embedded.1":
+ "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1"},
+ "system_response_attr": {"ThermalSettings.1.ThermalProfile": "Sound Cap"},
+ "mparams": {'system_attributes': {"ThermalSettings.1.ThermalProfile": "Sound Cap"}
+ },
+ "idrac_response_attr": {},
+ "lc_response_attr": {},
+ "resp": {
+ "System": {
+ "@Message.ExtendedInfo": [
+ {
+ "Message": "The request completed successfully.",
+ "MessageArgs": [],
+ "MessageArgs@odata.count": 0,
+ "MessageId": "Base.1.12.Success",
+ "RelatedProperties": [],
+ "RelatedProperties@odata.count": 0,
+ "Resolution": "None",
+ "Severity": "OK"
+ },
+ {
+ "Message": "The operation successfully completed.",
+ "MessageArgs": [],
+ "MessageArgs@odata.count": 0,
+ "MessageId": "IDRAC.2.7.SYS413",
+ "RelatedProperties": [],
+ "RelatedProperties@odata.count": 0,
+ "Resolution": "No response action is required.",
+ "Severity": "Informational"
+ }
+ ]
+ }
+ }}])
+ def test_update_idrac_attributes_case01(self, params, idrac_redfish_mock_for_attr, idrac_default_args):
+ idrac_default_args.update(params.get('mparams'))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ resp = self.module.update_idrac_attributes(idrac_redfish_mock_for_attr, f_module, params["uri_dict"],
+ params["idrac_response_attr"], params["system_response_attr"],
+ params["lc_response_attr"])
+ assert resp.keys() == params["resp"].keys()
+
+ @pytest.mark.parametrize("params", [{"res_id": "iDRAC.Embedded.1",
+ "uri_dict": {
+ "iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
+ "System.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1",
+ "LifecycleController.Embedded.1":
+ "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1"},
+ "lc_response_attr": {"LCAttributes.1.AutoUpdate": "Enabled"},
+ "mparams": {
+ 'lifecycle_controller_attributes': {"LCAttributes.1.AutoUpdate": "Enabled"}
+ },
+ "idrac_response_attr": {},
+ "system_response_attr": {},
+ "resp": {
+ "Lifecycle Controller": {
+ "@Message.ExtendedInfo": [
+ {
+ "Message": "The request completed successfully.",
+ "MessageArgs": [],
+ "MessageArgs@odata.count": 0,
+ "MessageId": "Base.1.12.Success",
+ "RelatedProperties": [],
+ "RelatedProperties@odata.count": 0,
+ "Resolution": "None",
+ "Severity": "OK"
+ },
+ {
+ "Message": "The operation successfully completed.",
+ "MessageArgs": [],
+ "MessageArgs@odata.count": 0,
+ "MessageId": "IDRAC.2.7.SYS413",
+ "RelatedProperties": [],
+ "RelatedProperties@odata.count": 0,
+ "Resolution": "No response action is required.",
+ "Severity": "Informational"
+ }
+ ]
+ }
+ }}])
+ def test_update_idrac_attributes_case02(self, params, idrac_redfish_mock_for_attr, idrac_default_args):
+ idrac_default_args.update(params.get('mparams'))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ resp = self.module.update_idrac_attributes(idrac_redfish_mock_for_attr, f_module, params["uri_dict"],
+ params["idrac_response_attr"], params["system_response_attr"],
+ params["lc_response_attr"])
+ assert resp.keys() == params["resp"].keys()
+
+ @pytest.mark.parametrize("params",
+ [{"json_data": {},
+ "diff": 1,
+ "uri_dict": {
+ "iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
+ "System.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1",
+ "LifecycleController.Embedded.1":
+ "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1"},
+ "system_response_attr": {"ThermalSettings.1.ThermalProfile": "Sound Cap"},
+ "mparams": {'system_attributes': {"ThermalSettings.1.ThermalProfile": "Sound Cap"}},
+ "idrac_response_attr": {},
+ "lc_response_attr": {},
+ "message": "Successfully updated the attributes."
+ }])
+ def _test_idrac_attributes(self, params, idrac_connection_attributes_mock, idrac_default_args, mocker):
+ idrac_connection_attributes_mock.success = params.get("success", True)
+ idrac_connection_attributes_mock.json_data = params.get('json_data')
+ idrac_default_args.update(params.get('mparams'))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(UTILS_PATH + 'get_manager_res_id', return_value=MANAGER_ID)
+ mocker.patch(MODULE_PATH + 'fetch_idrac_uri_attr', return_value=(params["diff"],
+ params["uri_dict"],
+ params["idrac_response_attr"],
+ params["system_response_attr"],
+ params["lc_response_attr"]))
+ mocker.patch(MODULE_PATH + 'update_idrac_attributes', return_value=params["resp"])
+ result = self._run_module(idrac_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("exc_type", [HTTPError, URLError])
+ def _test_main_idrac_attributes_exception_handling_case(self, exc_type, idrac_connection_attributes_mock, idrac_default_args, mocker):
+ idrac_default_args.update({'lifecycle_controller_attributes': {"LCAttributes.1.AutoUpdate": "Enabled"}})
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError]:
+ mocker.patch(
+ MODULE_PATH + 'update_idrac_attributes',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(
+ MODULE_PATH + 'update_idrac_attributes',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if not exc_type == URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_bios.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_bios.py
new file mode 100644
index 00000000..3ea74c90
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_bios.py
@@ -0,0 +1,587 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 6.2.0
+# Copyright (C) 2018-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_bios
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.idrac_bios.'
+
+BIOS_JOB_RUNNING = "BIOS Config job is running. Wait for the job to complete."
+NO_CHANGES_MSG = "No changes found to be applied."
+CHANGES_MSG = "Changes found to be applied."
+SUCCESS_CLEAR = "Successfully cleared the pending BIOS attributes."
+SUCCESS_COMPLETE = "Successfully applied the BIOS attributes update."
+SCHEDULED_SUCCESS = "Successfully scheduled the job for the BIOS attributes update."
+COMMITTED_SUCCESS = "Successfully committed changes. The job is in pending state. The changes will be applied {0}"
+RESET_TRIGGERED = "Reset BIOS action triggered successfully."
+HOST_RESTART_FAILED = "Unable to restart the host. Check the host status and restart the host manually."
+BIOS_RESET_TRIGGERED = "The BIOS reset action has been triggered successfully. The host reboot is complete."
+BIOS_RESET_COMPLETE = "BIOS reset to defaults has been completed successfully."
+BIOS_RESET_PENDING = "Pending attributes to be applied. " \
+ "Clear or apply the pending changes before resetting the BIOS."
+FORCE_BIOS_DELETE = "The BIOS configuration job is scheduled. Use 'force' to delete the job."
+INVALID_ATTRIBUTES_MSG = "The values specified for the attributes are invalid."
+UNSUPPORTED_APPLY_TIME = "Apply time {0} is not supported."
+MAINTENANCE_OFFSET = "The maintenance time must be post-fixed with local offset to {0}."
+MAINTENANCE_TIME = "The specified maintenance time window occurs in the past, " \
+ "provide a future time to schedule the maintenance window."
+
+
+@pytest.fixture
+def idrac_redfish_mock_for_bios(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestConfigBios(FakeAnsibleModule):
+ module = idrac_bios
+
+ @pytest.fixture
+ def idrac_configure_bios_mock(self):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.config_mgr = idrac_obj
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_configure_bios_mock(self, mocker, idrac_configure_bios_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH + 'iDRACConnection',
+ return_value=idrac_configure_bios_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_configure_bios_mock
+ return idrac_configure_bios_mock
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"Attributes": {}}, 'message': NO_CHANGES_MSG,
+ "success": True, 'mparams': {'clear_pending': True}},
+ {"json_data": {"Attributes": {}}, 'message': NO_CHANGES_MSG,
+ "success": True, 'mparams': {'clear_pending': True}, "check_mode": True},
+ {"json_data": {"Attributes": {"test": "value"}}, 'message': BIOS_JOB_RUNNING,
+ "success": True, 'mparams': {'clear_pending': True},
+ "check_scheduled_bios_job": ("job1", "Running")},
+ {"json_data": {"Attributes": {"test": "value"}}, 'message': BIOS_JOB_RUNNING,
+ "success": True, 'mparams': {'clear_pending': True},
+ "check_scheduled_bios_job": ("job1", "Starting")},
+ {"json_data": {"Attributes": {"test": "value"}}, 'message': SUCCESS_CLEAR,
+ "success": True, 'mparams': {'clear_pending': True},
+ "check_scheduled_bios_job": ("job1", "Scheduled")},
+ {"json_data": {"Attributes": {"test": "value"}}, 'message': CHANGES_MSG,
+ "success": True, 'mparams': {'clear_pending': True},
+ "check_scheduled_bios_job": ("job1", "Scheduled"), "check_mode": True},
+ {"json_data": {"Attributes": {"test": "value"}}, 'message': CHANGES_MSG,
+ "success": True, 'mparams': {'clear_pending': True},
+ "check_scheduled_bios_job": ("job1", "Scheduler"), "check_mode": True},
+ {"json_data": {"Attributes": {"test": "value"}}, 'message': SUCCESS_CLEAR,
+ "success": True, 'mparams': {'clear_pending': True},
+ "check_scheduled_bios_job": (None, "Scheduled")},
+ {"json_data": {"Attributes": {"test": "value"}}, 'message': CHANGES_MSG,
+ "success": True, 'mparams': {'clear_pending': True},
+ "check_scheduled_bios_job": (None, "Scheduled"), "check_mode": True},
+ {"json_data": {"Attributes": {"test": "value"},
+ "Members": [
+ {"Id": "job_1", "JobType": "RAIDConfiguration", "JobState": "Scheduled"},
+ {"Id": "job_1", "JobType": "BIOSConfiguration", "JobState": "Scheduled"}]},
+ 'message': SUCCESS_CLEAR,
+ "success": True, 'mparams': {'clear_pending': True}},
+ {"json_data": {"Attributes": {"test": "value"},
+ "Members": [{"Id": "job_1", "JobType": "BIOSConfiguration", "JobState": "Running"}]},
+ 'message': BIOS_JOB_RUNNING,
+ "success": True, 'mparams': {'clear_pending': True}},
+ {"json_data": {"Attributes": {"test": "value"},
+ "Members": [{"Id": "job_1", "JobType": "BIOSConfiguration", "JobState": "Starting"}]},
+ 'message': BIOS_JOB_RUNNING,
+ "success": True, 'mparams': {'clear_pending': True}},
+ ])
+ def test_idrac_bios_clear_pending(self, params, idrac_redfish_mock_for_bios, ome_response_mock, idrac_default_args,
+ mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params.get('json_data')
+ mocks = ["get_pending_attributes", "check_scheduled_bios_job", "delete_scheduled_bios_job"]
+ for m in mocks:
+ if m in params:
+ mocker.patch(MODULE_PATH + m, return_value=params.get(m, {}))
+ idrac_default_args.update(params['mparams'])
+ result = self._run_module(idrac_default_args, check_mode=params.get('check_mode', False))
+ assert result['status_msg'] == params['message']
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"Attributes": {}}, 'message': BIOS_RESET_TRIGGERED,
+ "reset_host": True,
+ "success": True, 'mparams': {'reset_bios': True}},
+ {"json_data": {"Attributes": {"BootMode": "Uefi"}}, 'message': BIOS_RESET_PENDING,
+ "reset_host": True,
+ "success": True, 'mparams': {'reset_bios': True}},
+ {"json_data": {"DateTime": "2022-09-14T05:59:35-05:00",
+ "DateTimeLocalOffset": "-05:00",
+ "Members": [{"Created": "2022-09-14T05:59:20-05:00", "MessageId": "SYS1003"},
+ {"Created": "2022-09-14T05:59:10-05:00", "MessageId": "UEFI0157"},
+ {"Created": "2022-09-14T05:59:30-05:00", "MessageId": "SYS1002"}],
+ "Entries": {
+ "@odata.id": "/redfish/v1/Managers/iDRAC.Embedded.1/LogServices/Lclog/Entries"
+ },
+ "Attributes": {}},
+ 'message': BIOS_RESET_TRIGGERED, "reset_host": True,
+ "success": True, 'mparams': {'reset_bios': True}},
+ {"json_data": {"DateTime": "2022-09-14T05:59:35-05:00",
+ "DateTimeLocalOffset": "-05:00",
+ "Members": [{"Created": "2022-09-14T05:59:20-05:00", "MessageId": "SYS1003"},
+ {"Created": "2022-09-14T05:59:10-05:00", "MessageId": "UEFI0157"},
+ {"Created": "2022-09-14T05:59:40-05:00", "MessageId": "SYS1002"}],
+ "Entries": {
+ "@odata.id": "/redfish/v1/Managers/iDRAC.Embedded.1/LogServices/Lclog/Entries"
+ },
+ "Attributes": {}},
+ 'message': BIOS_RESET_COMPLETE, "reset_host": True,
+ "success": True, 'mparams': {'reset_bios': True}},
+ {"json_data": {"Attributes": {}}, 'message': CHANGES_MSG,
+ "reset_host": True, "check_mode": True,
+ "success": True, 'mparams': {'reset_bios': True}},
+ {"json_data": {"PowerState": "On"}, 'message': BIOS_RESET_TRIGGERED,
+ "success": True, 'mparams': {'reset_bios': True, "reset_type": "force_restart"}},
+ {"json_data": {"PowerState": "Off"}, 'message': "{0} {1}".format(RESET_TRIGGERRED, HOST_RESTART_FAILED),
+ "success": True, 'mparams': {'reset_bios': True}},
+ {"json_data": {"PowerState": "On"}, 'message': HOST_RESTART_FAILED,
+ "get_power_state": "On", "power_act_host": False,
+ "success": True, 'mparams': {'reset_bios': True}},
+ {"json_data": {"PowerState": "On"}, 'message': HOST_RESTART_FAILED,
+ "get_power_state": "Off", "power_act_host": False,
+ "success": True, 'mparams': {'reset_bios': True}},
+ ])
+ def test_idrac_bios_reset_bios(self, params, idrac_redfish_mock_for_bios, ome_response_mock, idrac_default_args,
+ mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params.get('json_data')
+ mocks = ["reset_host", "get_power_state", "track_power_state", "power_act_host"]
+ for m in mocks:
+ if m in params:
+ mocker.patch(MODULE_PATH + m, return_value=params.get(m, {}))
+ mocker.patch("ansible_collections.dellemc.openmanage.plugins.module_utils.utils." + 'time.sleep',
+ return_value=None)
+ idrac_default_args.update(params['mparams'])
+ result = self._run_module(idrac_default_args, check_mode=params.get('check_mode', False))
+ assert result['status_msg'] == params['message']
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"Attributes": {"NumLock": "On"}}, 'message': NO_CHANGES_MSG,
+ "reset_host": True, "get_pending_attributes": {}, "validate_vs_registry": {},
+ "success": True, 'mparams': {"attributes": {"NumLock": "On"}}},
+ {"json_data": {"Attributes": {},
+ "RegistryEntries": {
+ "Attributes": [
+ {
+ "AttributeName": "SystemModelName",
+ "ReadOnly": True,
+ "Type": "String"
+ }, {
+ "AttributeName": "MemoryMode",
+ "ReadOnly": False,
+ "Type": "Enumeration",
+ "Value": [
+ {
+ "ValueDisplayName": "Off",
+ "ValueName": "PersistentMemoryOff"
+ },
+ {
+ "ValueDisplayName": "Non-Volatile DIMM",
+ "ValueName": "NVDIMM"
+ }
+ ],
+ }, {
+ "AttributeName": "ValidEnum",
+ "ReadOnly": False,
+ "Type": "Enumeration",
+ "Value": [
+ {
+ "ValueDisplayName": "Enabled",
+ "ValueName": "On"
+ },
+ {
+ "ValueDisplayName": "Disabled",
+ "ValueName": "Off"
+ }
+ ],
+ "WriteOnly": False
+ }, {
+ "AttributeName": "IntSetting",
+ "LowerBound": 0,
+ "ReadOnly": False,
+ "Type": "Integer",
+ "UpperBound": 32,
+ }, {
+ "AttributeName": "IntSetting3",
+ "LowerBound": 0,
+ "ReadOnly": False,
+ "Type": "Integer",
+ "UpperBound": 32,
+ }, {
+ "AttributeName": "IntSetting2",
+ "LowerBound": 0,
+ "ReadOnly": False,
+ "Type": "Integer",
+ "UpperBound": 32,
+ }, ]}}, 'message': INVALID_ATTRIBUTES_MSG,
+ "reset_host": True, "get_pending_attributes": {},
+ "success": True,
+ 'mparams': {"attributes": {"NumLock": "On", "SystemModelName": "new name", "MemoryMode": "DRAM",
+ "IntSetting": 33, "IntSetting2": 'zero', "IntSetting3": 25,
+ "ValidEnum": "On"}}},
+ {"json_data": {"Attributes": {"NumLock": "On"}}, 'message': CHANGES_MSG,
+ "reset_host": True, "get_pending_attributes": {}, "validate_vs_registry": {},
+ "check_mode": True,
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"}}},
+ {"json_data": {
+ "Attributes": {"NumLock": "On"},
+ "@Redfish.Settings": {
+ "SupportedApplyTimes": ["OnReset", "InMaintenanceWindowOnReset"]}},
+ 'message': UNSUPPORTED_APPLY_TIME.format('AtMaintenanceWindowStart'),
+ "reset_host": True, "get_pending_attributes": {}, "validate_vs_registry": {},
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"},
+ "apply_time": 'AtMaintenanceWindowStart',
+ "maintenance_window": {"start_time": '"2022-09-30T05:15:40-05:00"',
+ "duration": 600}}},
+ {"json_data": {"DateTime": "2022-09-14T05:59:35-05:00",
+ "DateTimeLocalOffset": "-05:00",
+ "Attributes": {"NumLock": "On"},
+ "@Redfish.Settings": {
+ "SupportedApplyTimes": ["OnReset", 'AtMaintenanceWindowStart',
+ "InMaintenanceWindowOnReset"]}},
+ 'message': MAINTENANCE_OFFSET.format('-05:00'),
+ "reset_host": True, "get_pending_attributes": {}, "validate_vs_registry": {},
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"},
+ "apply_time": 'AtMaintenanceWindowStart',
+ "maintenance_window": {"start_time": '"2022-09-30T05:15:40-00:00"',
+ "duration": 600}}},
+ {"json_data": {"DateTime": '2022-09-30T05:15:41-05:00',
+ "DateTimeLocalOffset": "-05:00",
+ "Attributes": {"NumLock": "On"},
+ "@Redfish.Settings": {
+ "SupportedApplyTimes": ["OnReset", 'AtMaintenanceWindowStart',
+ "InMaintenanceWindowOnReset"]}},
+ 'message': MAINTENANCE_TIME,
+ "reset_host": True, "get_pending_attributes": {}, "validate_vs_registry": {},
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"},
+ "apply_time": 'AtMaintenanceWindowStart',
+ "maintenance_window": {"start_time": '2022-09-30T05:15:40-05:00',
+ "duration": 600}}},
+ {"json_data": {"DateTime": '2022-09-30T05:15:39-05:00',
+ "DateTimeLocalOffset": "-05:00",
+ "Attributes": {"NumLock": "On"},
+ "@Redfish.Settings": {
+ "SupportedApplyTimes": ["OnReset", 'AtMaintenanceWindowStart',
+ "InMaintenanceWindowOnReset"]}},
+ 'message': COMMITTED_SUCCESS.format('AtMaintenanceWindowStart'),
+ "reset_host": True, "get_pending_attributes": {}, "validate_vs_registry": {},
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"},
+ "apply_time": 'AtMaintenanceWindowStart',
+ "maintenance_window": {"start_time": '2022-09-30T05:15:40-05:00',
+ "duration": 600}}},
+ {"json_data": {"DateTime": '2022-09-30T05:15:39-05:00',
+ "DateTimeLocalOffset": "-05:00",
+ "Attributes": {"NumLock": "On"},
+ "@Redfish.Settings": {
+ "SupportedApplyTimes": []}},
+ 'message': SCHEDULED_SUCCESS,
+ "reset_host": True, "get_pending_attributes": {}, "validate_vs_registry": {},
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"}, 'job_wait': False}},
+ {"json_data": {"DateTime": '2022-09-30T05:15:39-05:00',
+ "DateTimeLocalOffset": "-05:00",
+ "Attributes": {"NumLock": "On"},
+ "@Redfish.Settings": {
+ "SupportedApplyTimes": ["OnReset", 'AtMaintenanceWindowStart',
+ "InMaintenanceWindowOnReset"]}},
+ 'message': SCHEDULED_SUCCESS,
+ "reset_host": True, "get_pending_attributes": {}, "validate_vs_registry": {},
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"}, 'job_wait': False}},
+ {"json_data": {"DateTime": '2022-09-30T05:15:39-05:00',
+ "DateTimeLocalOffset": "-05:00",
+ "Attributes": {"NumLock": "On"},
+ "@Redfish.Settings": {
+ "SupportedApplyTimes": ["OnReset", 'AtMaintenanceWindowStart',
+ "InMaintenanceWindowOnReset"]}},
+ 'message': COMMITTED_SUCCESS.format('OnReset'),
+ "reset_host": True, "get_pending_attributes": {}, "validate_vs_registry": {},
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"}, 'apply_time': 'OnReset'}},
+ {"json_data": {
+ "Attributes": {"NumLock": "On"},
+ "@Redfish.Settings": {
+ "SupportedApplyTimes": ["OnReset", "AtMaintenanceWindowStart", "InMaintenanceWindowOnReset"]}},
+ 'message': BIOS_JOB_RUNNING,
+ "reset_host": True, "get_pending_attributes": {"AssetTag": 'test'}, "validate_vs_registry": {},
+ "check_scheduled_bios_job": ("job1", "Running"),
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"}}},
+ {"json_data": {
+ "Attributes": {"NumLock": "On"},
+ "@Redfish.Settings": {
+ "SupportedApplyTimes": ["OnReset", "AtMaintenanceWindowStart", "InMaintenanceWindowOnReset"]}},
+ 'message': "Attributes committed but reboot has failed {0}".format(HOST_RESTART_FAILED),
+ "reset_host": False, "get_pending_attributes": {"AssetTag": 'test'}, "validate_vs_registry": {},
+ "check_scheduled_bios_job": ("job1", "Scheduled"), "apply_attributes": ("job1", True),
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"}}},
+ {"json_data": {
+ "Attributes": {"NumLock": "On"},
+ "@Redfish.Settings":
+ {"SupportedApplyTimes": ["OnReset", "AtMaintenanceWindowStart", "InMaintenanceWindowOnReset"]}},
+ 'message': "Job Tracking Failed",
+ "reset_host": True, "get_pending_attributes": {"AssetTag": 'test'}, "validate_vs_registry": {},
+ "check_scheduled_bios_job": ("job1", "Scheduled"), "apply_attributes": ("job1", True),
+ "idrac_redfish_job_tracking": (True, "Job Tracking Failed", {}, 10),
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"}}},
+ {"json_data": {
+ "Attributes": {"NumLock": "On"},
+ "@Redfish.Settings": {
+ "SupportedApplyTimes": ["OnReset", "AtMaintenanceWindowStart", "InMaintenanceWindowOnReset"]}},
+ 'message': SUCCESS_COMPLETE,
+ "reset_host": True, "get_pending_attributes": {"AssetTag": 'test'}, "validate_vs_registry": {},
+ "check_scheduled_bios_job": ("job1", "Scheduled"), "apply_attributes": ("job1", True),
+ "idrac_redfish_job_tracking": (False, "Job Tracking Failed", {}, 10),
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"}}},
+ {"json_data": {
+ "Attributes": {"NumLock": "On"},
+ "@Redfish.Settings": {
+ "SupportedApplyTimes": ["OnReset", "AtMaintenanceWindowStart", "InMaintenanceWindowOnReset"]}},
+ 'message': SCHEDULED_SUCCESS,
+ "reset_host": True, "get_pending_attributes": {"AssetTag": 'test'}, "validate_vs_registry": {},
+ "check_scheduled_bios_job": ("job1", "Scheduled"), "apply_attributes": ("job1", True),
+ "idrac_redfish_job_tracking": (False, "Job Tracking Failed", {}, 10),
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"}, "job_wait": False}},
+ {"json_data": {
+ "Attributes": {"NumLock": "On"},
+ "@Redfish.Settings": {
+ "SupportedApplyTimes": ["OnReset", "AtMaintenanceWindowStart", "InMaintenanceWindowOnReset"]}},
+ 'message': COMMITTED_SUCCESS.format("Immediate"),
+ "reset_host": False, "get_pending_attributes": {"AssetTag": 'test'}, "validate_vs_registry": {},
+ "check_scheduled_bios_job": ("job1", "Scheduled"), "apply_attributes": (None, True),
+ "success": True, 'mparams': {"attributes": {"NumLock": "Off"}}},
+ ])
+ def test_idrac_bios_attributes(self, params, idrac_redfish_mock_for_bios, ome_response_mock, idrac_default_args,
+ mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params.get('json_data')
+ ome_response_mock.headers = {'Location': 'job1'}
+ mocks = ["get_current_attributes", "get_attributes_registry", "get_pending_attributes",
+ "check_scheduled_bios_job", "apply_attributes", "idrac_redfish_job_tracking",
+ "reset_host", "get_power_state", "track_power_state", "power_act_host"]
+ for m in mocks:
+ if m in params:
+ mocker.patch(MODULE_PATH + m, return_value=params.get(m, {}))
+ mocker.patch("ansible_collections.dellemc.openmanage.plugins.module_utils.utils." + 'time.sleep',
+ return_value=None)
+ idrac_default_args.update(params['mparams'])
+ result = self._run_module(idrac_default_args, check_mode=params.get('check_mode', False))
+ assert result['status_msg'] == params['message']
+
+ @pytest.mark.parametrize("exc_type", [RuntimeError, SSLValidationError, ConnectionError, KeyError,
+ ImportError, ValueError, TypeError, HTTPError])
+ def test_main_idrac_config_bios_exception_handling_case(self, exc_type, mocker,
+ idrac_connection_configure_bios_mock,
+ idrac_default_args):
+ idrac_default_args.update({"share_name": "sharename"})
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'run_server_bios_config',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(
+ MODULE_PATH + 'run_server_bios_config',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if not exc_type == URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
+
+ def test_run_idrac_bios_config_success_case01(self, idrac_connection_configure_bios_mock,
+ idrac_default_args, mocker):
+ idrac_default_args.update({"boot_sources": "bootsources"})
+ message = {"changes_applicable": True, "message": "changes are applicable"}
+ mocker.patch(MODULE_PATH +
+ '_validate_params', return_value=(False, "message of validate params"))
+        idrac_connection_configure_bios_mock.config_mgr.is_change_applicable.return_value = message
+ idrac_connection_configure_bios_mock.config_mgr.configure_boot_sources.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = True
+ msg = self.module.run_server_bios_config(idrac_connection_configure_bios_mock, f_module)
+ assert msg == {'changes_applicable': True, 'message': 'changes are applicable'}
+
+ def test_run_idrac_bios_config_success_case02(self, idrac_connection_configure_bios_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"boot_sources": "bootsources"})
+ message = {"changes_applicable": True, "Status": "Success", "message": "changes found to commit!"}
+ mocker.patch(MODULE_PATH +
+ '_validate_params', return_value=(False, "message of validate params"))
+        idrac_connection_configure_bios_mock.config_mgr.is_change_applicable.return_value = message
+ idrac_connection_configure_bios_mock.config_mgr.configure_boot_sources.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_server_bios_config(idrac_connection_configure_bios_mock, f_module)
+ assert msg == {'Status': 'Success',
+ 'changes_applicable': True,
+ 'message': 'changes found to commit!'}
+
+ def test_run_idrac_bios_config_success_case03(self, idrac_connection_configure_bios_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"boot_sources": "bootsources"})
+ message = {"changes_applicable": False, "Status": "Success", "Message": "No changes found to commit!"}
+ mocker.patch(MODULE_PATH +
+ '_validate_params', return_value=(False, "message of validate params"))
+        idrac_connection_configure_bios_mock.config_mgr.is_change_applicable.return_value = message
+ idrac_connection_configure_bios_mock.config_mgr.configure_boot_sources.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_server_bios_config(idrac_connection_configure_bios_mock, f_module)
+ assert msg == {'Message': 'No changes found to commit!',
+ 'Status': 'Success',
+ 'changes_applicable': False}
+
+ def test_run_idrac_bios_config_success_case04(self, idrac_connection_configure_bios_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"boot_sources": "bootsources"})
+ message = {"changes_applicable": False, "Status": "Success", "Message": "No changes found to apply."}
+ mocker.patch(MODULE_PATH +
+ '_validate_params', return_value=(False, "message of validate params"))
+        idrac_connection_configure_bios_mock.config_mgr.is_change_applicable.return_value = message
+ idrac_connection_configure_bios_mock.config_mgr.configure_boot_sources.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_server_bios_config(idrac_connection_configure_bios_mock, f_module)
+ assert msg == {'Message': 'No changes found to apply.',
+ 'Status': 'Success',
+ 'changes_applicable': False}
+
+ def test_run_idrac_bios_config_bootmode_failed_case0(self, idrac_connection_configure_bios_mock,
+ idrac_default_args,
+ mocker):
+ idrac_default_args.update({"boot_sources": "bootsources"})
+ message = {"changes_applicable": False, "Status": "failed", "Message": "No changes found to apply."}
+ mocker.patch(MODULE_PATH +
+ '_validate_params', return_value=(False, "message of validate params"))
+        idrac_connection_configure_bios_mock.config_mgr.is_change_applicable.return_value = message
+ idrac_connection_configure_bios_mock.config_mgr.configure_boot_sources.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_server_bios_config(idrac_connection_configure_bios_mock, f_module)
+ assert msg == {'Message': 'No changes found to apply.',
+ 'Status': 'failed',
+ 'changes_applicable': False}
+
+ def test_run_idrac_bios_config_errorhandle_failed_case0(self, idrac_connection_configure_bios_mock,
+ idrac_default_args,
+ mocker):
+ idrac_default_args.update({"boot_sources": "bootsources"})
+ mocker.patch(MODULE_PATH +
+ '_validate_params', return_value=(True, "Error occurs"))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_server_bios_config(idrac_connection_configure_bios_mock, f_module)
+ assert msg == idrac_connection_configure_bios_mock.config_mgr.configure_boot_sources()
+
+ def test_run_idrac_bios_config_status_failed_case01(self, idrac_connection_configure_bios_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"boot_sources": "bootsources"})
+ message = {'Status': 'Failed', 'Message': 'message of validate params'}
+ mocker.patch(MODULE_PATH +
+ '_validate_params', return_value=(True, "Error occurs"))
+ idrac_connection_configure_bios_mock.config_mgr.set_liason_share.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_server_bios_config(idrac_connection_configure_bios_mock, f_module)
+ assert msg == idrac_connection_configure_bios_mock.config_mgr.configure_boot_sources()
+
+ def test_run_idrac_bios_config_status_success_case01(self, idrac_connection_configure_bios_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({"boot_sources": "bootsources",
+ "attributes": {"boot_mode": "BootMode", "nvme_mode": "NvmeMode"}})
+        message = {'Status': 'Success', 'Message': 'message of validate params'}
+        mocker.patch(MODULE_PATH +
+                     '_validate_params', return_value=(False, "Error did not occur"))
+ idrac_connection_configure_bios_mock.config_mgr.configure_bios.return_value = message
+ idrac_connection_configure_bios_mock.config_mgr.configure_boot_sources.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_server_bios_config(idrac_connection_configure_bios_mock, f_module)
+        assert msg == {'Message': 'message of validate params', 'Status': 'Success'}
+
+ def test_run_bios_config_status_boot_sources_failed_case(self, idrac_connection_configure_bios_mock, mocker,
+ idrac_default_args):
+ idrac_default_args.update({"boot_sources": "bootsources"})
+ message = {'Status': 'Failed', "Data": {'Message': 'message of validate params'}}
+ idrac_connection_configure_bios_mock.config_mgr.set_liason_share.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ msg = self.module.run_server_bios_config(idrac_connection_configure_bios_mock, f_module)
+ assert msg == idrac_connection_configure_bios_mock.config_mgr.configure_boot_sources()
+
+ def test__validate_params_error_keys_case(self, idrac_connection_configure_bios_mock, idrac_default_args,
+ mocker):
+ idrac_default_args.update({})
+ attr = [{"name": "Name"}, {"index": "Index"}, {"enabled": "Enabled"}]
+ msg = self.module._validate_params(attr)
+ assert msg == "attribute keys must be one of the ['Name', 'Index', 'Enabled']."
+
+ def test__validate_params_check_params_case(self, idrac_connection_configure_bios_mock, mocker,
+ idrac_default_args):
+ mocker.patch(MODULE_PATH +
+ 'check_params', return_value=(True, "Error occurs in check params"))
+ attr = [{"name": "name1"}, {"Index": "index1"}]
+ msg = self.module._validate_params(attr)
+ assert msg == "attribute keys must be one of the ['Name', 'Index', 'Enabled']."
+
+ def test__validate_params_empty_params_case(self, idrac_connection_configure_bios_mock, mocker,
+ idrac_default_args):
+ mocker.patch(MODULE_PATH +
+ '_validate_name_index_duplication', return_value=(True, "Error occurs in "
+ "validate name"))
+ msg = self.module._validate_params([])
+ assert msg == (True, 'Error occurs in validate name')
+
+ def test__validate_name_index_duplication_error_true_case(self, idrac_connection_configure_bios_mock,
+ idrac_default_args):
+ result = self.module._validate_name_index_duplication([{"Name": "Name1"}, {"Name": "Name1"}])
+ assert result == 'duplicate name Name1'
+
+ def test__validate_name_index_duplication_error_false_case(self, idrac_connection_configure_bios_mock,
+ idrac_default_args):
+ result = self.module._validate_name_index_duplication([{"Name": "Name1"}, {"Name": "Name2"}])
+ assert result == ''
+
+ def test_check_params_false_case(self, idrac_connection_configure_bios_mock, idrac_default_args):
+ result = self.module.check_params({"required": False}, [{"name": "Name1", "required": False},
+ {"name": "Name2", "required": False}])
+ assert result == ''
+
+ @pytest.mark.parametrize("params", [
+ {"each": {"Name": 1}, 'message': "Name must be of type: <class 'str'>. 1 (<class 'int'>) provided."},
+ {"each": {"Index": "one"}, 'message': "Index must be of type: <class 'int'>. one (<class 'str'>) provided."},
+ {"each": {"Index": -1}, 'message': "Index must be greater than or equal to: 0"},
+ {"each": {"Name": 'test', "Index": 1}, 'message': ""},
+ {"each": {"Enabled": "one"}, 'message': "Enabled must be of type: <class 'bool'>. one (<class 'str'>) provided."},
+ ])
+ def test_check_params_required_true_case(self, idrac_connection_configure_bios_mock, params,
+ idrac_default_args):
+ fields = [
+ {"name": "Name", "type": str, "required": True},
+ {"name": "Index", "type": int, "required": False, "min": 0},
+ {"name": "Enabled", "type": bool, "required": False}
+ ]
+ result = self.module.check_params(params.get('each'), fields)
+ assert result == params.get('message')
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py
new file mode 100644
index 00000000..2e754888
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py
@@ -0,0 +1,256 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 6.1.0
+# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_boot
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock
+from mock import PropertyMock
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def boot_connection_mock(mocker, redfish_response_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'idrac_boot.iDRACRedfishAPI')
+ idrac_conn_mock_obj = idrac_conn_mock.return_value.__enter__.return_value
+ idrac_conn_mock_obj.invoke_request.return_value = redfish_response_mock
+ return idrac_conn_mock_obj
+
+
+class TestConfigBios(FakeAnsibleModule):
+
+ module = idrac_boot
+
+ def test_get_response_attributes(self, boot_connection_mock, redfish_response_mock, idrac_default_args):
+ idrac_default_args.update({"boot_options": {"display_name": "Boot001", "enabled": True}})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"Boot": {
+ "BootOptions": "", "Certificates": "", "BootOrder": [], "BootOrder@odata.count": 1,
+ "BootSourceOverrideEnabled": "Disabled", "BootSourceOverrideMode": "Legacy",
+ "BootSourceOverrideTarget": "None", "UefiTargetBootSourceOverride": None,
+ "BootSourceOverrideTarget@Redfish.AllowableValues": []},
+ "Actions": {"#ComputerSystem.Reset": {"ResetType@Redfish.AllowableValues": ["GracefulShutdown"]}}}
+ result = self.module.get_response_attributes(f_module, boot_connection_mock, "System.Embedded.1")
+ assert result["BootSourceOverrideEnabled"] == "Disabled"
+ redfish_response_mock.json_data["Boot"].pop("BootOptions", None)
+ with pytest.raises(Exception) as err:
+ self.module.get_response_attributes(f_module, boot_connection_mock, "System.Embedded.1")
+ assert err.value.args[0] == "The system does not support the BootOptions feature."
+
+ def test_get_existing_boot_options(self, boot_connection_mock, redfish_response_mock, idrac_default_args):
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"Members": [
+ {"@odata.context": "/redfish/v1/$metadata#BootOption.BootOption",
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1/BootOptions/HardDisk.List.1-1",
+ "@odata.type": "#BootOption.v1_0_4.BootOption", "BootOptionEnabled": True,
+ "BootOptionReference": "HardDisk.List.1-1",
+ "Description": "Current settings of the Legacy Boot option",
+ "DisplayName": "Hard drive C:", "Id": "HardDisk.List.1-1", "Name": "Legacy Boot option"}]}
+ resp_data = {'Members': [{
+ 'BootOptionEnabled': True, 'BootOptionReference': 'HardDisk.List.1-1',
+ 'Description': 'Current settings of the Legacy Boot option',
+ 'DisplayName': 'Hard drive C:', 'Id': 'HardDisk.List.1-1',
+ 'Name': 'Legacy Boot option'}]}
+ result = self.module.get_existing_boot_options(boot_connection_mock, "System.Embedded.1")
+ assert result == resp_data
+
+ def test_system_reset(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ mocker.patch(MODULE_PATH + 'idrac_boot.idrac_system_reset', return_value=(True, False, "Completed", {}))
+ idrac_default_args.update({"boot_source_override_mode": "uefi", "reset_type": "graceful_restart"})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ reset, track_failed, reset_msg, resp_data = self.module.system_reset(f_module, boot_connection_mock,
+ "System.Embedded.1")
+ assert reset is True
+
+ def test_get_scheduled_job(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ mocker.patch(MODULE_PATH + 'idrac_boot.time', return_value=None)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"Members": [{
+ "Description": "Job Instance", "EndTime": "TIME_NA", "Id": "JID_609237056489", "JobState": "Scheduled",
+ "JobType": "BIOSConfiguration", "Message": "Job scheduled successfully.", "MessageArgs": [],
+ "MessageId": "PR19", "Name": "Configure: BIOS.Setup.1-1", "PercentComplete": 10}]}
+ status, job = self.module.get_scheduled_job(boot_connection_mock)
+ assert status is True
+
+ def test_configure_boot_options(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"boot_source_override_mode": "uefi", "job_wait": True, "reset_type": "none",
+ "job_wait_timeout": 900})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_scheduled_job', return_value=(True, {}))
+ resp_data = {"BootOrder": ["Boot001", "Boot002", "Boot003"], "BootSourceOverrideEnabled": "Disabled",
+ "BootSourceOverrideMode": "Legacy", "BootSourceOverrideTarget": "UefiTarget",
+ "UefiTargetBootSourceOverride": "/0x31/0x33/0x01/0x01"}
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_response_attributes', return_value=resp_data)
+ with pytest.raises(Exception) as err:
+ self.module.configure_boot_options(f_module, boot_connection_mock, "System.Embedded.1", {"Boot001": False})
+ assert err.value.args[0] == "Unable to complete the request because the BIOS configuration job already " \
+ "exists. Wait for the pending job to complete."
+ redfish_response_mock.status_code = 202
+ redfish_response_mock.success = True
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/JID_123456789"}
+ redfish_response_mock.json_data = {"Attributes": {"BootSeq": [{"Name": "Boot001", "Id": 0, "Enabled": True},
+ {"Name": "Boot000", "Id": 1, "Enabled": True}]}}
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_scheduled_job', return_value=(False, {}))
+ mocker.patch(MODULE_PATH + 'idrac_boot.idrac_system_reset', return_value=(False, False, "Completed", {}))
+ mocker.patch(MODULE_PATH + 'idrac_boot.wait_for_idrac_job_completion',
+ return_value=({}, "This job is not complete after 900 seconds."))
+ with pytest.raises(Exception) as err:
+ self.module.configure_boot_options(f_module, boot_connection_mock, "System.Embedded.1", {"Boot001": False})
+ assert err.value.args[0] == "This job is not complete after 900 seconds."
+ resp_data = {"BootOrder": ["Boot001", "Boot002", "Boot003"], "BootSourceOverrideEnabled": "Disabled",
+ "BootSourceOverrideMode": "UEFI", "BootSourceOverrideTarget": "UefiTarget",
+ "UefiTargetBootSourceOverride": "/0x31/0x33/0x01/0x01"}
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_response_attributes', return_value=resp_data)
+ idrac_default_args.update({"boot_source_override_mode": "legacy"})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ redfish_response_mock.json_data = {"Attributes": {"UefiBootSeq": [
+ {"Name": "Boot001", "Id": 0, "Enabled": True}, {"Name": "Boot000", "Id": 1, "Enabled": True}]}}
+ with pytest.raises(Exception) as err:
+ self.module.configure_boot_options(f_module, boot_connection_mock, "System.Embedded.1", {"Boot001": False})
+ assert err.value.args[0] == "This job is not complete after 900 seconds."
+
+ def test_apply_boot_settings(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"boot_source_override_mode": "uefi", "job_wait": True, "reset_type": "none",
+ "job_wait_timeout": 900})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ payload = {"Boot": {"BootSourceOverrideMode": "UEFI"}}
+ redfish_response_mock.success = True
+ redfish_response_mock.status_code = 200
+ mocker.patch(MODULE_PATH + 'idrac_boot.idrac_system_reset', return_value=(False, False, "Completed", {}))
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_scheduled_job', return_value=(True, [{"Id": "JID_123456789"}]))
+ mocker.patch(MODULE_PATH + 'idrac_boot.wait_for_idrac_job_completion',
+ return_value=({}, "This job is not complete after 900 seconds."))
+ with pytest.raises(Exception) as err:
+ self.module.apply_boot_settings(f_module, boot_connection_mock, payload, "System.Embedded.1")
+ assert err.value.args[0] == "This job is not complete after 900 seconds."
+
+ def test_configure_boot_settings(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"boot_order": ["Boot005", "Boot001"], "job_wait": True, "reset_type": "none",
+ "job_wait_timeout": 900, "boot_source_override_mode": "uefi",
+ "boot_source_override_enabled": "once", "boot_source_override_target": "cd",
+ "uefi_target_boot_source_override": "test_uefi_path"})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ resp_data = {"BootOrder": ["Boot001", "Boot002", "Boot003"], "BootSourceOverrideEnabled": "Disabled",
+ "BootSourceOverrideMode": "Legacy", "BootSourceOverrideTarget": "UefiTarget",
+ "UefiTargetBootSourceOverride": "/0x31/0x33/0x01/0x01"}
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_response_attributes', return_value=resp_data)
+ with pytest.raises(Exception) as err:
+ self.module.configure_boot_settings(f_module, boot_connection_mock, "System.Embedded.1")
+ assert err.value.args[0] == "Invalid boot order reference provided."
+ idrac_default_args.update({"boot_order": ["Boot001", "Boot001"]})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as err:
+ self.module.configure_boot_settings(f_module, boot_connection_mock, "System.Embedded.1")
+ assert err.value.args[0] == "Duplicate boot order reference provided."
+ mocker.patch(MODULE_PATH + 'idrac_boot.apply_boot_settings', return_value={"JobStatus": "Completed"})
+ idrac_default_args.update({"boot_order": ["Boot001", "Boot003", "Boot002"]})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.configure_boot_settings(f_module, boot_connection_mock, "System.Embedded.1")
+ assert result["JobStatus"] == "Completed"
+ f_module.check_mode = True
+ with pytest.raises(Exception) as err:
+ self.module.configure_boot_settings(f_module, boot_connection_mock, "System.Embedded.1")
+ assert err.value.args[0] == "Changes found to be applied."
+
+ def test_configure_idrac_boot(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"job_wait": True, "reset_type": "none", "job_wait_timeout": 900,
+ "boot_options": [{"boot_option_reference": "HardDisk.List.1-1", "enabled": True}]})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ boot_return_data = {"Members": [{"BootOptionEnabled": False, "BootOptionReference": "HardDisk.List.1-1",
+ "Description": "Current settings of the Legacy Boot option",
+ "DisplayName": "Hard drive C:", "Id": "HardDisk.List.1-1",
+ "Name": "Legacy Boot option", "UefiDevicePath": "VenHw(D6C0639F-823DE6)"}],
+ "Name": "Boot Options Collection", "Description": "Collection of BootOptions"}
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_existing_boot_options', return_value=boot_return_data)
+ mocker.patch(MODULE_PATH + 'idrac_boot.configure_boot_options', return_value={"JobType": "Completed"})
+ mocker.patch(MODULE_PATH + 'idrac_boot.configure_boot_settings', return_value={"JobType": "Completed"})
+ result = self.module.configure_idrac_boot(f_module, boot_connection_mock, "System.Embedded.1")
+ assert result["JobType"] == "Completed"
+ idrac_default_args.update({"boot_options": [{"boot_option_reference": "HardDisk.List.1-2", "enabled": True}]})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as err:
+ self.module.configure_idrac_boot(f_module, boot_connection_mock, "System.Embedded.1")
+ assert err.value.args[0] == "Invalid boot_options provided."
+ idrac_default_args.update({"boot_options": [{"boot_option_reference": "HardDisk.List.1-1", "enabled": True},
+ {"boot_option_reference": "HardDisk.List.1-1", "enabled": True}]})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as err:
+ self.module.configure_idrac_boot(f_module, boot_connection_mock, "System.Embedded.1")
+ assert err.value.args[0] == "Duplicate boot_options provided."
+ idrac_default_args.update({"boot_options": [{"boot_option_reference": "HardDisk.List.1-1", "enabled": False}]})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = True
+ with pytest.raises(Exception) as err:
+ self.module.configure_idrac_boot(f_module, boot_connection_mock, "System.Embedded.1")
+ assert err.value.args[0] == "No changes found to be applied."
+ idrac_default_args.update({"boot_options": [{"boot_option_reference": "HardDisk.List.1-1", "enabled": True}]})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = True
+ with pytest.raises(Exception) as err:
+ self.module.configure_idrac_boot(f_module, boot_connection_mock, "System.Embedded.1")
+ assert err.value.args[0] == "Changes found to be applied."
+
+ @pytest.mark.parametrize("exc_type", [RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError,
+ ImportError, ValueError, TypeError])
+ def test_main_exception(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker, exc_type):
+ idrac_default_args.update({"boot_source_override_mode": "legacy"})
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_system_res_id', side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_system_res_id',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if exc_type != URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
+
+ def test_main_success(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"boot_source_override_mode": "legacy"})
+ redfish_response_mock.success = True
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_system_res_id', return_value=("System.Embedded.1", ""))
+ job_resp = {"Description": "Job Instance", "EndTime": "TIME_NA", "Id": "JID_609237056489",
+ "JobState": "Completed", "JobType": "BIOSConfiguration", "MessageId": "PR19",
+ "Message": "Job scheduled successfully.", "MessageArgs": [],
+ "Name": "Configure: BIOS.Setup.1-1", "PercentComplete": 100}
+ mocker.patch(MODULE_PATH + 'idrac_boot.configure_idrac_boot', return_value=job_resp)
+ boot_return_data = {"Members": [{"BootOptionEnabled": False, "BootOptionReference": "HardDisk.List.1-1",
+ "Description": "Current settings of the Legacy Boot option",
+ "DisplayName": "Hard drive C:", "Id": "HardDisk.List.1-1",
+ "Name": "Legacy Boot option", "UefiDevicePath": "VenHw(D6C0639F-823DE6)"}],
+ "Name": "Boot Options Collection", "Description": "Collection of BootOptions"}
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_existing_boot_options', return_value=boot_return_data)
+ resp_data = {"BootOrder": ["Boot001", "Boot002", "Boot003"], "BootSourceOverrideEnabled": "Disabled",
+ "BootSourceOverrideMode": "Legacy", "BootSourceOverrideTarget": "UefiTarget",
+ "UefiTargetBootSourceOverride": "/0x31/0x33/0x01/0x01"}
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_response_attributes', return_value=resp_data)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Successfully updated the boot settings."
+
+ def test_main_res_id_error(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"boot_source_override_mode": "legacy"})
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_system_res_id', return_value=("System.Embedded.5", "Failed"))
+ with pytest.raises(Exception) as err:
+ self._run_module(idrac_default_args)
+ assert err.value.args[0]["msg"] == "Failed"
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py
new file mode 100644
index 00000000..c5ee0dc8
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py
@@ -0,0 +1,298 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.5.0
+# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import os
+import tempfile
+from io import StringIO
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_certificates
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock
+
+NOT_SUPPORTED_ACTION = "Certificate {op} not supported for the specified certificate type {certype}."
+SUCCESS_MSG = "Successfully performed the '{command}' operation."
+NO_CHANGES_MSG = "No changes found to be applied."
+CHANGES_MSG = "Changes found to be applied."
+NO_RESET = " Reset iDRAC to apply new certificate. Until iDRAC is reset, the old certificate will be active."
+RESET_UNTRACK = " iDRAC reset is in progress. Until the iDRAC is reset, the changes would not apply."
+RESET_SUCCESS = " iDRAC has been reset successfully."
+RESET_FAIL = " Unable to reset the iDRAC. For changes to reflect, manually reset the iDRAC."
+SYSTEM_ID = "System.Embedded.1"
+MANAGER_ID = "iDRAC.Embedded.1"
+SYSTEMS_URI = "/redfish/v1/Systems"
+MANAGERS_URI = "/redfish/v1/Managers"
+IDRAC_SERVICE = "/redfish/v1/Dell/Managers/{res_id}/DelliDRACCardService"
+CSR_SSL = "/redfish/v1/CertificateService/Actions/CertificateService.GenerateCSR"
+IMPORT_SSL = "/redfish/v1/Dell/Managers/{res_id}/DelliDRACCardService/Actions/DelliDRACCardService.ImportSSLCertificate"
+EXPORT_SSL = "/redfish/v1/Dell/Managers/{res_id}/DelliDRACCardService/Actions/DelliDRACCardService.ExportSSLCertificate"
+RESET_SSL = "/redfish/v1/Dell/Managers/{res_id}/DelliDRACCardService/Actions/DelliDRACCardService.SSLResetCfg"
+IDRAC_RESET = "/redfish/v1/Managers/{res_id}/Actions/Manager.Reset"
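+# Fallback map of DelliDRACCardService action names to their Redfish target URIs; the get_actions_map tests
+# expect this map to be returned when the service resource does not advertise an Actions section.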
+idrac_service_actions = {
+ "#DelliDRACCardService.DeleteCertificate": "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.DeleteCertificate",
+ "#DelliDRACCardService.ExportCertificate": "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.ExportCertificate",
+ "#DelliDRACCardService.ExportSSLCertificate": EXPORT_SSL,
+ "#DelliDRACCardService.FactoryIdentityCertificateGenerateCSR":
+ "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.FactoryIdentityCertificateGenerateCSR",
+ "#DelliDRACCardService.FactoryIdentityExportCertificate":
+ "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.FactoryIdentityExportCertificate",
+ "#DelliDRACCardService.FactoryIdentityImportCertificate":
+ "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.FactoryIdentityImportCertificate",
+ "#DelliDRACCardService.GenerateSEKMCSR": "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.GenerateSEKMCSR",
+ "#DelliDRACCardService.ImportCertificate": "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.ImportCertificate",
+ "#DelliDRACCardService.ImportSSLCertificate": IMPORT_SSL,
+ "#DelliDRACCardService.SSLResetCfg": "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.SSLResetCfg",
+ "#DelliDRACCardService.iDRACReset": "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.iDRACReset"
+}
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.idrac_certificates.'
+
+
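+# Patches iDRACRedfishAPI in the certificates module and returns the mocked connection object whose
+# invoke_request call always returns ome_response_mock.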
+@pytest.fixture
+def idrac_redfish_mock_for_certs(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestIdracCertificates(FakeAnsibleModule):
+ module = idrac_certificates
+
+ @pytest.fixture
+ def idrac_certificates_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_certificates_mock(self, mocker, idrac_certificates_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
+ return_value=idrac_certificates_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_certificates_mock
+ return idrac_conn_mock
+
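+ # Each parametrized entry describes one end-to-end scenario: the mocked Redfish payload (json_data), the
+ # expected module message, the mocked reset_idrac result, and the module parameters (mparams) covering the
+ # export, import, generate_csr and reset commands.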
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"CertificateFile": b'Hello world!', "@Message.ExtendedInfo": [
+ {
+ "Message": "Successfully exported SSL Certificate.",
+ "MessageId": "IDRAC.2.5.LC067",
+ "Resolution": "No response action is required.",
+ "Severity": "Informational"
+ }]}, 'message': SUCCESS_MSG.format(command="export"), "success": True,
+ "reset_idrac": (True, False, RESET_SUCCESS),
+ 'mparams': {'command': 'export', 'certificate_type': "HTTPS", 'certificate_path': tempfile.gettempdir(),
+ 'reset': False}},
+ {"json_data": {"CertificateFile": b'Hello world!'}, 'message': CHANGES_MSG, "success": True,
+ "reset_idrac": (True, False, RESET_SUCCESS), 'check_mode': True,
+ 'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem', 'reset': False}},
+ {"json_data": {}, 'message': "{0}{1}".format(SUCCESS_MSG.format(command="import"), NO_RESET), "success": True,
+ "reset_idrac": (True, False, RESET_SUCCESS),
+ 'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem', 'reset': False}},
+ {"json_data": {}, 'message': SUCCESS_MSG.format(command="generate_csr"),
+ "success": True,
+ "get_cert_url": "url", "reset_idrac": (True, False, RESET_SUCCESS),
+ 'mparams': {'command': 'generate_csr', 'certificate_type': "HTTPS", 'certificate_path': tempfile.gettempdir(),
+ 'cert_params': {
+ "common_name": "dell",
+ "country_code": "IN",
+ "email_address": "dell@dell.com",
+ "locality_name": "Bangalore",
+ "organization_name": "Dell",
+ "organization_unit": "ansible",
+ "state_name": "Karnataka",
+ "subject_alt_name": [
+ "emc"
+ ]}}},
+ {"json_data": {}, 'message': NOT_SUPPORTED_ACTION.format(op="generate_csr", certype="CA"),
+ "success": True,
+ "get_cert_url": "url", "reset_idrac": (True, False, RESET_SUCCESS),
+ 'mparams': {'command': 'generate_csr', 'certificate_type': "CA", 'certificate_path': tempfile.gettempdir(),
+ 'cert_params': {
+ "common_name": "dell",
+ "country_code": "IN",
+ "email_address": "dell@dell.com",
+ "locality_name": "Bangalore",
+ "organization_name": "Dell",
+ "organization_unit": "ansible",
+ "state_name": "Karnataka",
+ "subject_alt_name": [
+ "emc"
+ ]}}},
+ {"json_data": {}, 'message': "{0}{1}".format(SUCCESS_MSG.format(command="import"), RESET_SUCCESS),
+ "success": True,
+ "get_cert_url": "url", "reset_idrac": (True, False, RESET_SUCCESS),
+ 'mparams': {'command': 'import', 'certificate_type': "CA", 'passphrase': 'myphrase',
+ 'certificate_path': '.p12'}},
+ {"json_data": {}, 'message': "{0}{1}".format(SUCCESS_MSG.format(command="import"), RESET_SUCCESS),
+ "success": True,
+ "get_cert_url": "url", "reset_idrac": (True, False, RESET_SUCCESS),
+ 'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem'}},
+ {"json_data": {}, 'message': "{0}{1}".format(SUCCESS_MSG.format(command="import"), RESET_SUCCESS),
+ "success": True,
+ "reset_idrac": (True, False, RESET_SUCCESS),
+ 'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem'}},
+ {"json_data": {}, 'message': SUCCESS_MSG.format(command="export"), "success": True, "get_cert_url": "url",
+ 'mparams': {'command': 'export', 'certificate_type': "HTTPS", 'certificate_path': tempfile.gettempdir()}},
+ {"json_data": {}, 'message': "{0}{1}".format(SUCCESS_MSG.format(command="reset"), RESET_SUCCESS),
+ "success": True, "get_cert_url": "url", "reset_idrac": (True, False, RESET_SUCCESS),
+ 'mparams': {'command': 'reset', 'certificate_type': "HTTPS"}
+ }
+ ])
+ def test_idrac_certificates(self, params, idrac_connection_certificates_mock, idrac_default_args, mocker):
+ idrac_connection_certificates_mock.success = params.get("success", True)
+ idrac_connection_certificates_mock.json_data = params.get('json_data')
+ if params.get('mparams').get('certificate_path') and params.get('mparams').get('command') == 'import':
+ sfx = params.get('mparams').get('certificate_path')
+ temp = tempfile.NamedTemporaryFile(suffix=sfx, delete=False)
+ temp.write(b'Hello')
+ temp.close()
+ params.get('mparams')['certificate_path'] = temp.name
+ mocker.patch(MODULE_PATH + 'get_res_id', return_value=MANAGER_ID)
+ mocker.patch(MODULE_PATH + 'get_idrac_service', return_value=IDRAC_SERVICE.format(res_id=MANAGER_ID))
+ mocker.patch(MODULE_PATH + 'get_actions_map', return_value=idrac_service_actions)
+ # mocker.patch(MODULE_PATH + 'get_cert_url', return_value=params.get('get_cert_url'))
+ # mocker.patch(MODULE_PATH + 'write_to_file', return_value=params.get('write_to_file'))
+ mocker.patch(MODULE_PATH + 'reset_idrac', return_value=params.get('reset_idrac'))
+ idrac_default_args.update(params.get('mparams'))
+ result = self._run_module(idrac_default_args, check_mode=params.get('check_mode', False))
+ if params.get('mparams').get('command') == 'import' and params.get('mparams').get(
+ 'certificate_path') and os.path.exists(temp.name):
+ os.remove(temp.name)
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("params", [{"json_data": {"Members": [{"@odata.id": '/redfish/v1/Managers/iDRAC.1'}]},
+ "certype": 'Server', "res_id": "iDRAC.1"},
+ {"json_data": {"Members": []},
+ "certype": 'Server', "res_id": MANAGER_ID}
+ ])
+ def test_res_id(
+ self, params, idrac_redfish_mock_for_certs, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ res_id = self.module.get_res_id(idrac_redfish_mock_for_certs, params.get('certype'))
+ assert res_id == params['res_id']
+
+ @pytest.mark.parametrize("params", [{"json_data": {
+ "Links": {
+ "Oem": {
+ "Dell": {
+ "DelliDRACCardService": {
+ "@odata.id": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService"
+ }}}},
+ "VirtualMedia": {
+ "@odata.id": "/redfish/v1/Managers/iDRAC.Embedded.1/VirtualMedia"}
+ },
+ "idrac_srv": '/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService', "res_id": "iDRAC.1"},
+ {"json_data": {"Members": []},
+ "idrac_srv": '/redfish/v1/Dell/Managers/iDRAC.Embedded.1/DelliDRACCardService', "res_id": MANAGER_ID}
+ ])
+ def test_get_idrac_service(
+ self, params, idrac_redfish_mock_for_certs, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ idrac_srv = self.module.get_idrac_service(idrac_redfish_mock_for_certs, params.get('res_id'))
+ assert idrac_srv == params['idrac_srv']
+
+ @pytest.mark.parametrize("params", [{"json_data": {
+ "Actions": {
+ "#DelliDRACCardService.ExportSSLCertificate": {
+ "SSLCertType@Redfish.AllowableValues": ["CA", "CSC", "ClientTrustCertificate", "Server"],
+ "target":
+ "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.ExportSSLCertificate"
+ },
+ "#DelliDRACCardService.ImportSSLCertificate": {
+ "CertificateType@Redfish.AllowableValues": ["CA", "CSC", "ClientTrustCertificate", "Server"],
+ "target":
+ "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.ImportSSLCertificate"
+ },
+ "#DelliDRACCardService.SSLResetCfg": {
+ "target": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.SSLResetCfg"
+ },
+ },
+ },
+ "idrac_service_uri": '/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService',
+ "actions": {
+ '#DelliDRACCardService.ExportSSLCertificate':
+ '/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.ExportSSLCertificate',
+ '#DelliDRACCardService.ImportSSLCertificate':
+ '/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.ImportSSLCertificate',
+ '#DelliDRACCardService.SSLResetCfg':
+ '/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.SSLResetCfg'}},
+ {"json_data": {"Members": []},
+ "idrac_service_uri": '/redfish/v1/Dell/Managers/iDRAC.Embedded.1/DelliDRACCardService',
+ "actions": idrac_service_actions}
+ ])
+ def test_get_actions_map(
+ self, params, idrac_redfish_mock_for_certs, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ actions = self.module.get_actions_map(idrac_redfish_mock_for_certs, params.get('idrac_service_uri'))
+ assert actions == params['actions']
+
+ @pytest.mark.parametrize("params", [{"actions": {}, "op": "generate_csr",
+ "certype": 'Server', "res_id": "iDRAC.1",
+ "dynurl": "/redfish/v1/CertificateService/Actions/CertificateService.GenerateCSR"},
+ {"actions": {}, "op": "import",
+ "certype": 'Server', "res_id": "iDRAC.1",
+ "dynurl": "/redfish/v1/Dell/Managers/iDRAC.1/DelliDRACCardService/Actions/DelliDRACCardService.ImportSSLCertificate"}
+ ])
+ def test_get_cert_url(self, params):
+ dynurl = self.module.get_cert_url(params.get('actions'), params.get('op'), params.get('certype'),
+ params.get('res_id'))
+ assert dynurl == params['dynurl']
+
+ @pytest.mark.parametrize("params", [
+ {"cert_data": {"CertificateFile": 'Hello world!',
+ "@Message.ExtendedInfo": [{
+ "Message": "Successfully exported SSL Certificate.",
+ "MessageId": "IDRAC.2.5.LC067",
+ "Resolution": "No response action is required.",
+ "Severity": "Informational"}
+ ]},
+ "result": {'@Message.ExtendedInfo': [
+ {'Message': 'Successfully exported SSL Certificate.',
+ 'MessageId': 'IDRAC.2.5.LC067',
+ 'Resolution': 'No response action is required.',
+ 'Severity': 'Informational'}]},
+ "mparams": {'command': 'export', 'certificate_type': "HTTPS",
+ 'certificate_path': tempfile.gettempdir(), 'reset': False}}])
+ def test_format_output(self, params, idrac_default_args):
+ idrac_default_args.update(params.get('mparams'))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.format_output(f_module, params.get('cert_data'))
+ if os.path.exists(result.get('certificate_path')):
+ os.remove(result.get('certificate_path'))
+ assert 'result' not in result
+
+ @pytest.mark.parametrize("exc_type", [SSLValidationError, URLError, ValueError, TypeError,
+ ConnectionError, HTTPError, ImportError, RuntimeError])
+ def test_main_exceptions(self, exc_type, idrac_connection_certificates_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"command": "export", "certificate_path": "mypath"})
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + "get_res_id",
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH + "get_res_id",
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if exc_type != URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware.py
new file mode 100644
index 00000000..c30ce409
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware.py
@@ -0,0 +1,625 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import pytest
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_firmware
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from mock import MagicMock, patch, Mock
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.parse import urlparse, ParseResult
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestidracFirmware(FakeAnsibleModule):
+ module = idrac_firmware
+
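+ # Stands in for the omsdk update manager: update_from_repo and update_from_repo_url return canned job
+ # details with an HTTP 200 status code and a placeholder PackageList.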
+ @pytest.fixture
+ def idrac_firmware_update_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.update_mgr = idrac_obj
+ idrac_obj.update_from_repo = Mock(return_value={
+ "update_status": {
+ "job_details": {
+ "Data": {
+ "StatusCode": 200,
+ "body": {
+ "PackageList": [{}]
+ }
+ }
+ }
+ }
+ })
+ idrac_obj.update_from_repo_url = Mock(return_value={"job_details": {"Data": {"StatusCode": 200,
+ "body": {"PackageList": [
+ {}]
+ }
+ }
+ }
+ })
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_firmware_job_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.job_mgr = idrac_obj
+ idrac_obj.get_job_status_redfish = Mock(return_value={
+ "update_status": {
+ "job_details": {
+ "Data": {
+ "StatusCode": 200,
+ "body": {
+ "PackageList": [{}]
+ }
+ }
+ }
+ }
+ })
+ idrac_obj.job_wait = Mock(return_value="21543")
+ return idrac_obj
+
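+ # Patches idrac_firmware.re when the attribute is available (falling back to a bare MagicMock) and returns
+ # the literal string "3.30"; the tests request this fixture only for its patching side effect.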
+ @pytest.fixture
+ def re_match_mock(self, mocker):
+ try:
+ re_mock = mocker.patch(
+ MODULE_PATH + 'idrac_firmware.re')
+ except AttributeError:
+ re_mock = MagicMock()
+ obj = MagicMock()
+ re_mock.match.group.return_value = obj
+ return "3.30"
+
+ @pytest.fixture
+ def ET_convert_mock(self, mocker):
+ try:
+ ET_mock = mocker.patch(
+ MODULE_PATH + 'idrac_firmware.ET')
+ except AttributeError:
+ ET_mock = MagicMock()
+ obj = MagicMock()
+ ET_mock.fromstring.return_value = obj
+ return ET_mock
+
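+ # Replaces idrac_firmware.FileOnShare with a MagicMock so the update tests never need access to a real
+ # network share.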
+ @pytest.fixture
+ def fileonshare_idrac_firmware_mock(self, mocker):
+ share_mock = mocker.patch(MODULE_PATH + 'idrac_firmware.FileOnShare',
+ return_value=MagicMock())
+ return share_mock
+
+ @pytest.fixture
+ def idrac_connection_firmware_mock(self, mocker, idrac_firmware_update_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH +
+ 'idrac_firmware.iDRACConnection',
+ return_value=idrac_firmware_update_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_firmware_update_mock
+ return idrac_firmware_update_mock
+
+ @pytest.fixture
+ def idrac_connection_firmware_redfish_mock(self, mocker, idrac_firmware_job_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH +
+ 'idrac_firmware.iDRACRedfishAPI',
+ return_value=idrac_firmware_job_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_firmware_job_mock
+ return idrac_firmware_job_mock
+
+ def test_main_idrac_firmware_success_case(self, idrac_connection_firmware_mock,
+ idrac_connection_firmware_redfish_mock,
+ idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": "Catalog.xml",
+ "share_user": "sharename", "share_password": "sharepswd",
+ "share_mnt": "sharmnt",
+ "reboot": True, "job_wait": True
+ })
+ message = {"Status": "Success", "update_msg": "Successfully updated the firmware.",
+ "update_status": "Success", 'changed': False, 'failed': False}
+ idrac_connection_firmware_redfish_mock.success = True
+ idrac_connection_firmware_redfish_mock.json_data = {}
+ mocker.patch(MODULE_PATH + 'idrac_firmware.update_firmware_redfish', return_value=message)
+ result = self._run_module(idrac_default_args)
+ assert result == {'msg': 'Successfully updated the firmware.', 'update_status': 'Success',
+ 'changed': False, 'failed': False}
+
+ @pytest.mark.parametrize("exc_type", [RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError,
+ ImportError, ValueError, TypeError])
+ def test_main_idrac_firmware_exception_handling_case(self, exc_type, mocker, idrac_default_args,
+ idrac_connection_firmware_redfish_mock,
+ idrac_connection_firmware_mock):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": "Catalog.xml",
+ "share_user": "sharename", "share_password": "sharepswd",
+ "share_mnt": "sharmnt",
+ "reboot": True, "job_wait": True
+ })
+ idrac_connection_firmware_redfish_mock.success = True
+ idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
+ mocker.patch(MODULE_PATH +
+ 'idrac_firmware._validate_catalog_file', return_value="catalog_file_name")
+ mocker.patch(MODULE_PATH +
+ 'idrac_firmware.update_firmware_omsdk', side_effect=exc_type('test'))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert 'msg' in result
+ assert result['failed'] is True
+
+ def test_main_HTTPError_case(self, idrac_connection_firmware_mock, idrac_default_args,
+ idrac_connection_firmware_redfish_mock, mocker):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": "Catalog.xml",
+ "share_user": "sharename", "share_password": "sharepswd",
+ "share_mnt": "sharmnt",
+ "reboot": True, "job_wait": True
+ })
+ json_str = to_text(json.dumps({"data": "out"}))
+ idrac_connection_firmware_redfish_mock.success = True
+ idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
+ mocker.patch(MODULE_PATH + 'idrac_firmware.update_firmware_omsdk',
+ side_effect=HTTPError('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert 'msg' in result
+ assert result['failed'] is True
+
+ def test_update_firmware_omsdk_success_case01(self, idrac_connection_firmware_mock,
+ idrac_connection_firmware_redfish_mock, idrac_default_args, mocker,
+ re_match_mock):
+ idrac_default_args.update({"share_name": "https://downloads.dell.com", "catalog_file_name": "Catalog.xml",
+ "share_user": "UserName", "share_password": "sharepswd",
+ "share_mnt": "shrmnt",
+ "reboot": True, "job_wait": True, "ignore_cert_warning": True,
+ "apply_update": True})
+ mocker.patch(MODULE_PATH + "idrac_firmware.update_firmware_url_omsdk",
+ return_value=({"update_status": {"job_details": {"Data": {"StatusCode": 200,
+ "body": {"PackageList": [{}]}}}}},
+ {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}))
+
+ mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson",
+ return_value=({"BaseLocation": None,
+ "ComponentID": "18981",
+ "ComponentType": "APAC",
+ "Criticality": "3",
+ "DisplayName": "Dell OS Driver Pack",
+ "JobID": None,
+ "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64"
+ "_19.10.12_A00.EXE",
+ "PackagePath": "FOLDER05902898M/1/Drivers-for-"
+ "OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
+ "PackageVersion": "19.10.12",
+ "RebootType": "NONE",
+ "Target": "DCIM:INSTALLED#802__DriverPack.Embedded.1:LC.Embedded.1"
+ }, True, False))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ idrac_connection_firmware_mock.match.return_value = "2.70"
+ idrac_connection_firmware_redfish_mock.success = True
+ idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
+ idrac_connection_firmware_mock.ServerGeneration.return_value = "13"
+ idrac_connection_firmware_mock.update_mgr.update_from_repo.return_value = {
+ "job_details": {"Data": {"StatusCode": 200, "GetRepoBasedUpdateList_OUTPUT": {},
+ "body": {"PackageList1": [{}]}}}
+ }
+ result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
+ assert result["update_status"]["job_details"]["Data"]["StatusCode"] == 200
+
+ def test_update_firmware_omsdk_success_case02(self, idrac_connection_firmware_mock,
+ idrac_connection_firmware_redfish_mock, idrac_default_args, mocker,
+ re_match_mock, fileonshare_idrac_firmware_mock):
+ idrac_default_args.update({"share_name": "mhttps://downloads.dell.com", "catalog_file_name": "Catalog.xml",
+ "share_user": "UserName", "share_password": "sharepswd",
+ "share_mnt": "shrmnt",
+ "reboot": True, "job_wait": True, "ignore_cert_warning": True,
+ "apply_update": True
+ })
+ mocker.patch(MODULE_PATH + "idrac_firmware.update_firmware_url_omsdk",
+ return_value=({"update_status": {"job_details": {"data": {"StatusCode": 200,
+ "body": {"PackageList": [{}]}}}}},
+ {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}))
+
+ mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson",
+ return_value=({"BaseLocation": None,
+ "ComponentID": "18981",
+ "ComponentType": "APAC",
+ "Criticality": "3",
+ "DisplayName": "Dell OS Driver Pack",
+ "JobID": None,
+ "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64"
+ "_19.10.12_A00.EXE",
+ "PackagePath": "FOLDER05902898M/1/Drivers-for-"
+ "OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
+ "PackageVersion": "19.10.12",
+ "RebootType": "NONE",
+ "Target": "DCIM:INSTALLED#802__DriverPack.Embedded.1:LC.Embedded.1"
+ }, True))
+
+ f_module = self.get_module_mock(params=idrac_default_args)
+ idrac_connection_firmware_mock.match.return_value = "2.70"
+ idrac_connection_firmware_mock.ServerGeneration.return_value = "13"
+ idrac_connection_firmware_redfish_mock.success = True
+ idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
+ mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson", return_value=("INSTANCENAME", False, False))
+ idrac_connection_firmware_mock.update_mgr.update_from_repo.return_value = {
+ "job_details": {"Data": {"StatusCode": 200, "GetRepoBasedUpdateList_OUTPUT": {},
+ "body": {"PackageList": [{}]}}}}
+ upd_share = fileonshare_idrac_firmware_mock
+ upd_share.IsValid = True
+ result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
+ assert result["update_status"]["job_details"]["Data"]["StatusCode"] == 200
+
+ def test_update_firmware_redfish_success_case03(self, idrac_connection_firmware_mock,
+ idrac_connection_firmware_redfish_mock,
+ idrac_default_args, mocker, re_match_mock):
+ idrac_default_args.update({"share_name": "https://downloads.dell.com", "catalog_file_name": "Catalog.xml",
+ "share_user": "UserName", "share_password": "sharepswd",
+ "share_mnt": "shrmnt",
+ "reboot": True, "job_wait": False, "ignore_cert_warning": True,
+ "apply_update": True
+ })
+ mocker.patch(MODULE_PATH + "idrac_firmware.update_firmware_url_redfish",
+ return_value=(
+ {"job_details": {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}},
+ {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}))
+
+ mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson",
+ return_value=({"BaseLocation": None,
+ "ComponentID": "18981",
+ "ComponentType": "APAC",
+ "Criticality": "3",
+ "DisplayName": "Dell OS Driver Pack",
+ "JobID": None,
+ "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64_"
+ "19.10.12_A00.EXE",
+ "PackagePath": "FOLDER05902898M/1/Drivers-for-OS-"
+ "Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
+ "PackageVersion": "19.10.12",
+ "RebootType": "NONE",
+ "Target": "DCIM:INSTALLED#802__DriverPack.Embedded.1:LC.Embedded.1"
+ }, True))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ idrac_connection_firmware_mock.re_match_mock.group = Mock(return_value="3.30")
+ idrac_connection_firmware_redfish_mock.success = True
+ idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "3.30"}
+ mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson", return_value=("INSTANCENAME", False, False))
+ idrac_connection_firmware_mock.ServerGeneration = "14"
+ result = self.module.update_firmware_redfish(idrac_connection_firmware_mock, f_module, {})
+ assert result["changed"] is False
+ assert result["update_msg"] == "Successfully triggered the job to update the firmware."
+
+ def test_update_firmware_omsdk_status_success_case01(self, idrac_connection_firmware_mock,
+ idrac_connection_firmware_redfish_mock, idrac_default_args,
+ mocker, re_match_mock, fileonshare_idrac_firmware_mock):
+ idrac_default_args.update({"share_name": "mhttps://downloads.dell.com", "catalog_file_name": "Catalog.xml",
+ "share_user": "UserName", "share_password": "sharepswd",
+ "share_mnt": "sharemnt",
+ "reboot": True, "job_wait": True, "ignore_cert_warning": True,
+ "apply_update": True
+ })
+ mocker.patch(MODULE_PATH + "idrac_firmware.update_firmware_url_omsdk",
+ return_value=({"update_status": {"job_details": {"data": {"StatusCode": 200,
+ "body": {"PackageList": [{}]}}}}},
+ {"job_details": {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}}))
+
+ mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson",
+ return_value={
+ "BaseLocation": None,
+ "ComponentID": "18981",
+ "ComponentType": "APAC",
+ "Criticality": "3",
+ "DisplayName": "Dell OS Driver Pack",
+ "JobID": None,
+ "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
+ "PackagePath": "FOLDER05902898M/1/Drivers-for-OS-Deployment_"
+ "Application_X0DW6_WN64_19.10.12_A00.EXE",
+ "PackageVersion": "19.10.12",
+ "RebootType": "NONE",
+ "Target": "DCIM:INSTALLED#802__DriverPack.Embedded.1:LC.Embedded.1"
+ })
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idrac_connection_firmware_mock.match.return_value = "2.70"
+ idrac_connection_firmware_mock.ServerGeneration.return_value = "13"
+ idrac_connection_firmware_redfish_mock.success = True
+ idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
+ idrac_connection_firmware_mock.update_mgr.update_from_repo.return_value = {"job_details": {
+ "Data": {"StatusCode": 200, "body": {}, "GetRepoBasedUpdateList_OUTPUT": {}}, "Status": "Success"},
+ "Status": "Success"}
+ upd_share = fileonshare_idrac_firmware_mock
+ upd_share.IsValid = True
+ result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
+ assert result == {'changed': False, 'failed': False,
+ 'update_msg': 'Successfully triggered the job to update the firmware.',
+ 'update_status': {'Status': 'Success',
+ 'job_details': {'Data': {'StatusCode': 200, 'body': {},
+ "GetRepoBasedUpdateList_OUTPUT": {}},
+ 'Status': 'Success'}}}
+
+ def test_update_firmware_omsdk_status_failed_case01(self, idrac_connection_firmware_mock,
+ idrac_connection_firmware_redfish_mock,
+ idrac_default_args, mocker, re_match_mock):
+ idrac_default_args.update({"share_name": "mhttps://downloads.dell.com", "catalog_file_name": "Catalog.xml",
+ "share_user": "UserName", "share_password": "sharepswd",
+ "share_mnt": "sharemnt",
+ "reboot": True, "job_wait": True, "ignore_cert_warning": True,
+ "apply_update": True})
+ mocker.patch(MODULE_PATH + "idrac_firmware.update_firmware_url_omsdk",
+ return_value=({"update_status": {"job_details": {"data": {"StatusCode": 200,
+ "body": {"PackageList": [{}]}}}}},
+ {"job_details": {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}}))
+
+ mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson",
+ return_value={
+ "BaseLocation": None,
+ "ComponentID": "18981",
+ "ComponentType": "APAC",
+ "Criticality": "3",
+ "DisplayName": "Dell OS Driver Pack",
+ "JobID": None,
+ "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
+ "PackagePath": "FOLDER05902898M/1/Drivers-for-OS-Deployment_"
+ "Application_X0DW6_WN64_19.10.12_A00.EXE",
+ "PackageVersion": "19.10.12",
+ "RebootType": "NONE",
+ "Target": "DCIM:INSTALLED#802__DriverPack.Embedded.1:LC.Embedded.1"
+ })
+
+ f_module = self.get_module_mock(params=idrac_default_args)
+ idrac_connection_firmware_mock.match.return_value = "2.70"
+ idrac_connection_firmware_mock.ServerGeneration.return_value = "13"
+ idrac_connection_firmware_redfish_mock.success = True
+ idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
+ idrac_connection_firmware_mock.update_mgr.update_from_repo.return_value = {"job_details": {"Data": {
+ "StatusCode": 200, "body": {}, "GetRepoBasedUpdateList_OUTPUT": {}}, "Status": "Failed"},
+ "Status": "Failed"}
+ with pytest.raises(Exception) as ex:
+ self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
+ assert ex.value.args[0] == "Firmware update failed."
+
+ def test__validate_catalog_file_case01(self, idrac_connection_firmware_mock, idrac_default_args):
+ idrac_default_args.update({"catalog_file_name": ""})
+ with pytest.raises(ValueError) as exc:
+ self.module._validate_catalog_file("")
+ assert exc.value.args[0] == 'catalog_file_name should be a non-empty string.'
+
+ def test__validate_catalog_file_case02(self, idrac_connection_firmware_mock, idrac_default_args):
+ idrac_default_args.update({"catalog_file_name": "Catalog.json"})
+ with pytest.raises(ValueError) as exc:
+ self.module._validate_catalog_file("Catalog.json")
+ assert exc.value.args[0] == 'catalog_file_name should be an XML file.'
+
+ def test_convert_xmltojson_case01(self, mocker, idrac_connection_firmware_mock,
+ idrac_default_args, ET_convert_mock):
+ idrac_default_args.update({"PackageList": [{
+ "BaseLocation": None,
+ "ComponentID": "18981",
+ "ComponentType": "APAC",
+ "Criticality": "3",
+ "DisplayName": "Dell OS Driver Pack",
+ "JobID": None,
+ "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
+ "PackagePath":
+ "FOLDER05902898M/1/Drivers-for-OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
+ "PackageVersion": "19.10.12"}]})
+ mocker.patch(MODULE_PATH + "idrac_firmware.get_job_status", return_value=("Component", False))
+ mocker.patch(MODULE_PATH + 'idrac_firmware.ET')
+ result = self.module._convert_xmltojson({"PackageList": [{"INSTANCENAME": {"PROPERTY": {"NAME": "abc"}}}]},
+ MagicMock(), None)
+ assert result == ([], True, False)
+
+ def test_convert_xmltojson_case02(self, mocker, idrac_connection_firmware_mock, idrac_default_args):
+ idrac_default_args.update({"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}})
+ packagelist = {"PackageList": "INSTANCENAME"}
+ mocker.patch(MODULE_PATH + "idrac_firmware.get_job_status", return_value=("Component", False))
+ mocker.patch(MODULE_PATH + 'idrac_firmware.ET')
+ result = self.module._convert_xmltojson(packagelist, MagicMock(), None)
+ assert result == ([], True, False)
+
+ def test_get_jobid_success_case01(self, idrac_connection_firmware_mock, idrac_default_args,
+ idrac_firmware_job_mock,
+ idrac_connection_firmware_redfish_mock):
+ idrac_default_args.update({"Location": "https://jobmanager/jid123"})
+ idrac_firmware_job_mock.status_code = 202
+ idrac_firmware_job_mock.Success = True
+ idrac_connection_firmware_redfish_mock.update_mgr.headers.get().split().__getitem__().return_value = "jid123"
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.get_jobid(f_module, idrac_firmware_job_mock)
+ assert result == idrac_connection_firmware_redfish_mock.headers.get().split().__getitem__()
+
+ def test_get_jobid_fail_case01(self, idrac_connection_firmware_mock, idrac_default_args,
+ idrac_firmware_job_mock):
+ idrac_firmware_job_mock.status_code = 202
+ idrac_firmware_job_mock.headers = {"Location": None}
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as exc:
+ self.module.get_jobid(f_module, idrac_firmware_job_mock)
+ assert exc.value.args[0] == "Failed to update firmware."
+
+ def test_get_jobid_fail_case02(self, idrac_connection_firmware_mock, idrac_default_args,
+ idrac_firmware_job_mock):
+ idrac_firmware_job_mock.status_code = 400
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as exc:
+ self.module.get_jobid(f_module, idrac_firmware_job_mock)
+ assert exc.value.args[0] == "Failed to update firmware."
+
+ def test_update_firmware_url_omsdk_success_case02(self, idrac_connection_firmware_mock, idrac_default_args,
+ mocker, idrac_connection_firmware_redfish_mock):
+ idrac_default_args.update({"share_name": "http://downloads.dell.com", "catalog_file_name": "catalog.xml",
+ "share_user": "shareuser", "share_password": "sharepswd",
+ "share_mnt": "sharmnt",
+ "reboot": True, "job_wait": False, "ignore_cert_warning": True,
+ "share_type": "http", "idrac_ip": "idrac_ip", "idrac_user": "idrac_user",
+ "idrac_password": "idrac_password", "idrac_port": 443
+ })
+ mocker.patch(MODULE_PATH + "idrac_firmware.get_jobid",
+ return_value="23451")
+
+ mocker.patch(MODULE_PATH + "idrac_firmware.urlparse",
+ return_value=ParseResult(scheme='http', netloc='downloads.dell.com',
+ path='/%7Eguido/Python.html',
+ params='', query='', fragment=''))
+ mocker.patch("socket.gethostbyname", return_value="downloads.dell.com")
+ f_module = self.get_module_mock(params=idrac_default_args)
+ idrac_connection_firmware_mock.use_redfish = False
+ idrac_connection_firmware_redfish_mock.get_job_status_redfish = "Status"
+ idrac_connection_firmware_redfish_mock.update_mgr.job_mgr.job_wait.return_value = "12345"
+ idrac_connection_firmware_mock.update_mgr.update_from_repo_url.return_value = {
+ "update_status": {"job_details": {"data": {
+ "StatusCode": 200,
+ "body": {
+ "PackageList": [
+ {}]
+ }
+ }
+ }
+ }
+ }
+ idrac_connection_firmware_mock.update_mgr.update_from_dell_repo_url.return_value = {"job_details": {"Data": {
+ "GetRepoBasedUpdateList_OUTPUT": {
+ "Message": [
+ {}]
+ }
+ }
+ }
+ }
+ payload = {"ApplyUpdate": "True",
+ "CatalogFile": "Catalog.xml",
+ "IgnoreCertWarning": "On",
+ "RebootNeeded": True,
+ "UserName": "username",
+ "Password": "psw"
+ }
+ result = self.module.update_firmware_url_omsdk(f_module, idrac_connection_firmware_mock,
+ "http://downloads.dell.com", "catalog.xml", True, True, True,
+ False, payload)
+ assert result == (
+ {'job_details': {'Data': {'GetRepoBasedUpdateList_OUTPUT': {'Message': [{}]}}}}, {})
+
+ def test_update_firmware_url_omsdk(self, idrac_connection_firmware_mock, idrac_default_args, mocker,
+ idrac_connection_firmware_redfish_mock):
+ idrac_default_args.update({"share_name": "http://downloads.dell.com", "catalog_file_name": "catalog.xml",
+ "share_user": "shareuser", "share_password": "sharepswd",
+ "share_mnt": "sharmnt",
+ "reboot": True, "job_wait": False, "ignore_cert_warning": True,
+ "share_type": "http", "idrac_ip": "idrac_ip", "idrac_user": "idrac_user",
+ "idrac_password": "idrac_password", "idrac_port": 443
+ })
+ mocker.patch(MODULE_PATH + "idrac_firmware.get_jobid",
+ return_value="23451")
+ mocker.patch(MODULE_PATH + "idrac_firmware.get_check_mode_status")
+ idrac_connection_firmware_mock.use_redfish = True
+ idrac_connection_firmware_mock.job_mgr.get_job_status_redfish.return_value = "23451"
+ idrac_connection_firmware_mock.update_mgr.update_from_dell_repo_url.return_value = {
+ "InstanceID": "JID_12345678"}
+ f_module = self.get_module_mock(params=idrac_default_args)
+ payload = {"ApplyUpdate": "True", "CatalogFile": "Catalog.xml", "IgnoreCertWarning": "On",
+ "RebootNeeded": True, "UserName": "username", "Password": "psw"}
+ result = self.module.update_firmware_url_omsdk(f_module, idrac_connection_firmware_mock,
+ "http://downloads.dell.com/repo",
+ "catalog.xml", True, True, True, True, payload)
+ assert result[0] == {"InstanceID": "JID_12345678"}
+
+ def _test_update_firmware_redfish(self, idrac_connection_firmware_mock, idrac_default_args, re_match_mock,
+ mocker, idrac_connection_firmware_redfish_mock,
+ fileonshare_idrac_firmware_mock):
+ idrac_default_args.update({"share_name": "192.168.0.1:/share_name", "catalog_file_name": "catalog.xml",
+ "share_user": "shareuser", "share_password": "sharepswd",
+ "share_mnt": "sharmnt",
+ "reboot": True, "job_wait": False, "ignore_cert_warning": True,
+ "share_type": "http", "idrac_ip": "idrac_ip", "idrac_user": "idrac_user",
+ "idrac_password": "idrac_password", "idrac_port": 443, 'apply_update': True
+ })
+ mocker.patch(MODULE_PATH + "idrac_firmware.SHARE_TYPE",
+ return_value={"NFS": "NFS"})
+ mocker.patch(MODULE_PATH + "idrac_firmware.eval",
+ return_value={"PackageList": []})
+ mocker.patch(MODULE_PATH + "idrac_firmware.wait_for_job_completion", return_value=({}, None))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ re_mock = mocker.patch(MODULE_PATH + "idrac_firmware.re",
+ return_value=MagicMock())
+ re_mock.match(MagicMock(), MagicMock()).group.return_value = "3.60"
+ mocker.patch(MODULE_PATH + "idrac_firmware.get_jobid",
+ return_value="23451")
+ idrac_connection_firmware_mock.idrac.update_mgr.job_mgr.get_job_status_redfish.return_value = "23451"
+ idrac_connection_firmware_mock.ServerGeneration = "14"
+ upd_share = fileonshare_idrac_firmware_mock
+ upd_share.remote_addr.return_value = "192.168.0.1"
+ upd_share.remote.share_name.return_value = "share_name"
+ upd_share.remote_share_type.name.lower.return_value = "NFS"
+ result = self.module.update_firmware_redfish(idrac_connection_firmware_mock, f_module)
+ assert result['update_msg'] == "Successfully triggered the job to update the firmware."
+
+ def _test_get_job_status(self, idrac_connection_firmware_mock, idrac_default_args,
+ mocker, idrac_connection_firmware_redfish_mock):
+ idrac_default_args.update({"share_name": "http://downloads.dell.com", "catalog_file_name": "catalog.xml",
+ "share_user": "shareuser", "share_password": "sharepswd",
+ "share_mnt": "sharmnt", "apply_update": False,
+ "reboot": True, "job_wait": False, "ignore_cert_warning": True,
+ "share_type": "http", "idrac_ip": "idrac_ip", "idrac_user": "idrac_user",
+ "idrac_password": "idrac_password", "idrac_port": 443})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ idrac_connection_firmware_redfish_mock.success = True
+ idrac_connection_firmware_redfish_mock.json_data = {"JobStatus": "OK"}
+ each_comp = {"JobID": "JID_1234567", "Messages": [{"Message": "test_message"}], "JobStatus": "Completed"}
+ result = self.module.get_job_status(f_module, each_comp, None)
+ assert result[1] is False
+
+ def test_message_verification(self, idrac_connection_firmware_mock, idrac_connection_firmware_redfish_mock,
+ idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "http://downloads.dell.com", "catalog_file_name": "catalog.xml",
+ "share_user": "shareuser", "share_password": "sharepswd",
+ "share_mnt": "sharmnt", "apply_update": False,
+ "reboot": False, "job_wait": True, "ignore_cert_warning": True,
+ "idrac_ip": "idrac_ip", "idrac_user": "idrac_user",
+ "idrac_password": "idrac_password", "idrac_port": 443})
+ mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson", return_value=("INSTANCENAME", False, False))
+ # mocker.patch(MODULE_PATH + "idrac_firmware.re")
+ idrac_connection_firmware_redfish_mock.success = True
+ idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
+ assert result['update_msg'] == "Successfully fetched the applicable firmware update package list."
+
+ idrac_default_args.update({"apply_update": True, "reboot": False, "job_wait": False})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
+ assert result['update_msg'] == "Successfully triggered the job to stage the firmware."
+
+ idrac_default_args.update({"apply_update": True, "reboot": False, "job_wait": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
+ assert result['update_msg'] == "Successfully staged the applicable firmware update packages."
+
+ idrac_default_args.update({"apply_update": True, "reboot": False, "job_wait": True})
+ mocker.patch(MODULE_PATH + "idrac_firmware.update_firmware_url_omsdk",
+ return_value=({"Status": "Success"}, {"PackageList": []}))
+ mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson", return_value=({}, True, True))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
+ assert result['update_msg'] == "Successfully staged the applicable firmware update packages with error(s)."
+
+ idrac_default_args.update({"apply_update": True, "reboot": True, "job_wait": True})
+ mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson", return_value=({}, True, False))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
+ assert result['update_msg'] == "Successfully updated the firmware."
+
+ idrac_default_args.update({"apply_update": True, "reboot": True, "job_wait": True})
+ mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson", return_value=({}, True, True))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
+ assert result['update_msg'] == "Firmware update failed."
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware_info.py
new file mode 100644
index 00000000..787dba2c
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware_info.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_firmware_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, PropertyMock
+from pytest import importorskip
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestFirmware(FakeAnsibleModule):
+ module = idrac_firmware_info
+
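+ # MagicMock standing in for the omsdk update manager; InstalledFirmware is exposed as a property so tests
+ # can swap in return values or side effects.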
+ @pytest.fixture
+ def idrac_firmware_info_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.update_mgr = idrac_obj
+ type(idrac_obj).InstalledFirmware = PropertyMock(return_value="msg")
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_firmware_info_connection_mock(self, mocker, idrac_firmware_info_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH + 'idrac_firmware_info.iDRACConnection',
+ return_value=idrac_firmware_info_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_firmware_info_mock
+ return idrac_firmware_info_mock
+
+ def test_main_idrac_get_firmware_info_success_case01(self, idrac_firmware_info_connection_mock,
+ idrac_default_args):
+ obj2 = MagicMock()
+ idrac_firmware_info_connection_mock.update_mgr = obj2
+ type(obj2).InstalledFirmware = PropertyMock(return_value={"Status": "Success"})
+ result = self._run_module(idrac_default_args)
+ assert result == {"firmware_info": {"Status": "Success"},
+ "msg": "Successfully fetched the firmware inventory details.",
+ "changed": False}
+
+ @pytest.mark.parametrize("exc_type", [SSLValidationError, URLError, ValueError, TypeError,
+ ConnectionError, HTTPError])
+ def test_idrac_get_firmware_info_exception_handling_case(self, idrac_firmware_info_connection_mock,
+ exc_type, mocker, idrac_default_args):
+ json_str = to_text(json.dumps({"data": "out"}))
+ obj2 = MagicMock()
+ idrac_firmware_info_connection_mock.update_mgr = obj2
+ if exc_type not in [HTTPError, SSLValidationError]:
+ type(obj2).InstalledFirmware = PropertyMock(side_effect=exc_type('test'))
+ else:
+ type(obj2).InstalledFirmware = PropertyMock(side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+        if exc_type != URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py
new file mode 100644
index 00000000..39df4e4c
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py
@@ -0,0 +1,78 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_lifecycle_controller_job_status_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, PropertyMock
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestLcJobStatus(FakeAnsibleModule):
+ module = idrac_lifecycle_controller_job_status_info
+
+ @pytest.fixture
+ def idrac_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.job_mgr = idrac_obj
+ type(idrac_obj).get_job_status = PropertyMock(return_value="job_id")
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_get_lc_job_status_connection_mock(self, mocker, idrac_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH +
+ 'idrac_lifecycle_controller_job_status_info.iDRACConnection',
+ return_value=idrac_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_mock
+ return idrac_mock
+
+ def test_main_idrac_get_lc_job_status_success_case01(self, idrac_get_lc_job_status_connection_mock,
+ idrac_default_args, mocker):
+ idrac_default_args.update({"job_id": "job_id"})
+ idrac_get_lc_job_status_connection_mock.job_mgr.get_job_status.return_value = {"Status": "Success"}
+ result = self._run_module(idrac_default_args)
+ assert result["changed"] is False
+
+ @pytest.mark.parametrize("exc_type", [SSLValidationError, URLError, ValueError, TypeError,
+ ConnectionError, HTTPError])
+ def test_main_exception_handling_case(self, exc_type, mocker, idrac_get_lc_job_status_connection_mock,
+ idrac_default_args):
+ idrac_default_args.update({"job_id": "job_id"})
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type == URLError:
+ idrac_get_lc_job_status_connection_mock.job_mgr.get_job_status.side_effect = exc_type("url open error")
+ result = self._run_module(idrac_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ idrac_get_lc_job_status_connection_mock.job_mgr.get_job_status.side_effect = exc_type("exception message")
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ idrac_get_lc_job_status_connection_mock.job_mgr.get_job_status.side_effect = exc_type('http://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py
new file mode 100644
index 00000000..49193267
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py
@@ -0,0 +1,91 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_lifecycle_controller_jobs
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from mock import MagicMock, PropertyMock
+from io import StringIO
+from ansible.module_utils._text import to_text
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestDeleteLcJob(FakeAnsibleModule):
+ module = idrac_lifecycle_controller_jobs
+
+ @pytest.fixture
+ def idrac_lc_job_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.job_mgr = idrac_obj
+ type(idrac_obj).delete_job = PropertyMock(return_value="msg")
+ type(idrac_obj).delete_all_jobs = PropertyMock(return_value="msg")
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_delete_lc_job_queue_mock(self, mocker, idrac_lc_job_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH +
+ 'idrac_lifecycle_controller_jobs.iDRACConnection', return_value=idrac_lc_job_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_lc_job_mock
+ return idrac_lc_job_mock
+
+ def test_main_idrac_lc_job_success_case01(self, idrac_connection_delete_lc_job_queue_mock, idrac_default_args):
+ idrac_default_args.update({"job_id": "job_id"})
+ idrac_connection_delete_lc_job_queue_mock.job_mgr.delete_job.return_value = {"Status": "Success"}
+ result = self._run_module(idrac_default_args)
+ assert result == {'changed': True, 'msg': 'Successfully deleted the job.', 'status': {'Status': 'Success'}}
+
+ def test_main_idrac_lc_job_success_case02(self, idrac_connection_delete_lc_job_queue_mock, idrac_default_args):
+ idrac_connection_delete_lc_job_queue_mock.job_mgr.delete_all_jobs.return_value = {"Status": "Success"}
+ result = self._run_module(idrac_default_args)
+ assert result == {'changed': True, 'msg': 'Successfully deleted the job queue.', 'status': {'Status': 'Success'}}
+
+ def test_main_idrac_delete_lc_job_failure_case(self, idrac_connection_delete_lc_job_queue_mock, idrac_default_args):
+ idrac_default_args.update({"job_id": "job_id"})
+ idrac_connection_delete_lc_job_queue_mock.job_mgr.delete_job.return_value = {"Status": "Error"}
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result == {'failed': True, 'msg': "Failed to delete the Job: {0}.".format("job_id"),
+ 'status': {'Status': 'Error'}}
+
+ @pytest.mark.parametrize("exc_type", [URLError, HTTPError, ImportError, ValueError, RuntimeError, TypeError])
+ def test_main_exception_handling_idrac_lc_job_case(self, exc_type, idrac_connection_delete_lc_job_queue_mock,
+ idrac_default_args):
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ idrac_connection_delete_lc_job_queue_mock.job_mgr.delete_all_jobs.side_effect = exc_type('test')
+ idrac_connection_delete_lc_job_queue_mock.job_mgr.delete_job.side_effect = exc_type('test')
+ else:
+ idrac_connection_delete_lc_job_queue_mock.job_mgr.delete_all_jobs.side_effect = \
+ exc_type('http://testhost.com', 400, 'http error message', {"accept-type": "application/json"},
+ StringIO(json_str))
+ idrac_connection_delete_lc_job_queue_mock.job_mgr.delete_job.side_effect = \
+ exc_type('http://testhost.com', 400, 'http error message', {"accept-type": "application/json"},
+ StringIO(json_str))
+        if exc_type != URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+            # URLError is not raised through fail_json; run the module normally and
+            # verify that a message is still returned.
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py
new file mode 100644
index 00000000..c1a0894e
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py
@@ -0,0 +1,108 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_lifecycle_controller_logs
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestExportLcLogs(FakeAnsibleModule):
+ module = idrac_lifecycle_controller_logs
+
+ @pytest.fixture
+ def idrac_export_lc_logs_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.file_share_manager = idrac_obj
+ omsdk_mock.log_mgr = idrac_obj
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_export_lc_logs_mock(self, mocker, idrac_export_lc_logs_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH + 'idrac_lifecycle_controller_logs.iDRACConnection',
+ return_value=idrac_export_lc_logs_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_export_lc_logs_mock
+ return idrac_export_lc_logs_mock
+
+ @pytest.fixture
+ def idrac_file_manager_export_lc_logs_mock(self, mocker):
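+        # Patch the module's file_share_manager if it is exposed; otherwise fall back to a plain MagicMock.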
+        lclog_file_name_format = "%ip_%Y%m%d_%H%M%S_LC_Log.log"
+        try:
+            file_manager_obj = mocker.patch(MODULE_PATH + 'idrac_lifecycle_controller_logs.file_share_manager')
+        except AttributeError:
+            file_manager_obj = MagicMock()
+ obj = MagicMock()
+ file_manager_obj.create_share_obj.return_value = obj
+ file_manager_obj.myshare.new_file(lclog_file_name_format).return_value = obj
+ return file_manager_obj
+
+ def test_main_export_lc_logs_success_case(self, idrac_connection_export_lc_logs_mock, idrac_default_args, mocker,
+ idrac_file_manager_export_lc_logs_mock):
+ idrac_default_args.update({"share_name": "sharename", "share_user": "shareuser",
+ "share_password": "sharepassword", "job_wait": True})
+ message = {"Status": "Success", "JobStatus": "Success"}
+ mocker.patch(MODULE_PATH + 'idrac_lifecycle_controller_logs.run_export_lc_logs', return_value=message)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Successfully exported the lifecycle controller logs."
+
+ def test_run_export_lc_logs_success_case01(self, idrac_connection_export_lc_logs_mock, idrac_default_args,
+ idrac_file_manager_export_lc_logs_mock):
+ idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
+ "share_password": "sharepassword", "job_wait": True})
+ idrac_connection_export_lc_logs_mock.log_mgr.lclog_export.return_value = {"Status": "Success"}
+ f_module = self.get_module_mock(params=idrac_default_args)
+ msg = self.module.run_export_lc_logs(idrac_connection_export_lc_logs_mock, f_module)
+ assert msg == {'Status': 'Success'}
+
+ def test_run_export_lc_logs_status_fail_case01(self, idrac_connection_export_lc_logs_mock, idrac_default_args,
+ idrac_file_manager_export_lc_logs_mock):
+ idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
+ "share_password": "sharepassword", "job_wait": True})
+ idrac_connection_export_lc_logs_mock.log_mgr.lclog_export.return_value = {"Status": "failed"}
+ f_module = self.get_module_mock(params=idrac_default_args)
+ msg = self.module.run_export_lc_logs(idrac_connection_export_lc_logs_mock, f_module)
+ assert msg == {'Status': 'failed'}
+
+ @pytest.mark.parametrize("exc_type", [RuntimeError, SSLValidationError, ConnectionError, KeyError,
+ ImportError, ValueError, TypeError, HTTPError, URLError])
+ def test_main_export_lc_logs_exception_handling_case(self, exc_type, mocker, idrac_connection_export_lc_logs_mock,
+ idrac_default_args, idrac_file_manager_export_lc_logs_mock):
+ idrac_default_args.update({"share_name": "sharename", "share_user": "shareuser",
+ "share_password": "sharepassword", "job_wait": True})
+ idrac_connection_export_lc_logs_mock.log_mgr.lclog_export.return_value = {"Status": "Failed"}
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'idrac_lifecycle_controller_logs.run_export_lc_logs',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH + 'idrac_lifecycle_controller_logs.run_export_lc_logs',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+        if exc_type != URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py
new file mode 100644
index 00000000..d00e2bc0
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_lifecycle_controller_status_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from mock import PropertyMock
+from io import StringIO
+from ansible.module_utils._text import to_text
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestLcStatus(FakeAnsibleModule):
+ module = idrac_lifecycle_controller_status_info
+
+ @pytest.fixture
+ def idrac_lc_status_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.config_mgr = idrac_obj
+ type(idrac_obj).LCStatus = Mock(return_value="lcstatus")
+ type(idrac_obj).LCReady = Mock(return_value="lcready")
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_lcstatus_mock(self, mocker, idrac_lc_status_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH + 'idrac_lifecycle_controller_status_info.iDRACConnection',
+ return_value=idrac_lc_status_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_lc_status_mock
+ return idrac_lc_status_mock
+
+ def test_main_get_lcstatus_success_case01(self, idrac_connection_lcstatus_mock, idrac_default_args):
+ obj2 = MagicMock()
+ idrac_connection_lcstatus_mock.config_mgr = obj2
+ type(obj2).LCStatus = PropertyMock(return_value="lcstatus")
+ type(obj2).LCReady = PropertyMock(return_value="lcready")
+ result = self._run_module(idrac_default_args)
+ assert result['lc_status_info']['LCReady'] == "lcready"
+ assert result['lc_status_info']['LCStatus'] == "lcstatus"
+
+ @pytest.mark.parametrize("exc_type", [RuntimeError, SSLValidationError, ConnectionError, KeyError,
+ ImportError, ValueError, TypeError, HTTPError, URLError])
+ def test_main_get_lcstatus_exception_handling_case(self, exc_type, idrac_connection_lcstatus_mock,
+ idrac_default_args):
+ obj2 = MagicMock()
+ idrac_connection_lcstatus_mock.config_mgr = obj2
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type == URLError:
+ type(obj2).LCReady = PropertyMock(side_effect=exc_type("url open error"))
+ result = self._run_module(idrac_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ type(obj2).LCReady = PropertyMock(side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
+ else:
+ type(obj2).LCReady = PropertyMock(side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network.py
new file mode 100644
index 00000000..10f7183f
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network.py
@@ -0,0 +1,286 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 6.0.0
+# Copyright (C) 2018-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_network
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestConfigNetwork(FakeAnsibleModule):
+ module = idrac_network
+
+ @pytest.fixture
+ def idrac_configure_network_mock(self):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.file_share_manager = idrac_obj
+ omsdk_mock.config_mgr = idrac_obj
+ type(idrac_obj).create_share_obj = Mock(return_value="networkstatus")
+ type(idrac_obj).set_liason_share = Mock(return_value="networkstatus")
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_file_manager_config_networking_mock(self, mocker):
+ try:
+ file_manager_obj = mocker.patch(
+ MODULE_PATH + 'idrac_network.file_share_manager')
+ except AttributeError:
+ file_manager_obj = MagicMock()
+ obj = MagicMock()
+ file_manager_obj.create_share_obj.return_value = obj
+ return file_manager_obj
+
+ @pytest.fixture
+ def idrac_connection_configure_network_mock(self, mocker, idrac_configure_network_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH +
+ 'idrac_network.iDRACConnection',
+ return_value=idrac_configure_network_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_configure_network_mock
+ return idrac_configure_network_mock
+
+ def test_main_idrac_configure_network_success_case(self, idrac_connection_configure_network_mock, mocker,
+ idrac_default_args, idrac_file_manager_config_networking_mock):
+ idrac_default_args.update({"share_name": None})
+ message = {'changed': False, 'msg': {'Status': "Success", "message": "No changes found to commit!"}}
+ mocker.patch(MODULE_PATH + 'idrac_network.run_idrac_network_config', return_value=message)
+ result = self._run_module(idrac_default_args)
+ assert result == {'msg': 'Successfully configured the idrac network settings.',
+ 'network_status': {
+ 'changed': False,
+ 'msg': {'Status': 'Success', 'message': 'No changes found to commit!'}},
+ 'changed': False, 'failed': False}
+ status_msg = {"Status": "Success", "Message": "No changes found to commit!"}
+ mocker.patch(MODULE_PATH + 'idrac_network.run_idrac_network_config', return_value=status_msg)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Successfully configured the idrac network settings."
+ status_msg = {"Status": "Success", "Message": "No changes were applied"}
+ mocker.patch(MODULE_PATH + 'idrac_network.run_idrac_network_config', return_value=status_msg)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Successfully configured the idrac network settings."
+
+ def test_run_idrac_network_config_success_case01(self, idrac_connection_configure_network_mock, idrac_default_args,
+ idrac_file_manager_config_networking_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "register_idrac_on_dns": "Enabled",
+ "dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
+ "setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
+ "enable_nic": "Enabled", "nic_selection": "Dedicated",
+ "failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
+ "network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
+ "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
+ "static_dns_2": "staticdns2", "static_gateway": "staticgateway",
+ "static_net_mask": "staticnetmask"})
+ message = {"changes_applicable": True, "message": "changes are applicable"}
+ idrac_connection_configure_network_mock.config_mgr.is_change_applicable.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
+ assert msg == {'changes_applicable': True, 'message': 'changes are applicable'}
+
+ def test_run_idrac_network_config_success_case02(self, idrac_connection_configure_network_mock, idrac_default_args,
+ idrac_file_manager_config_networking_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "register_idrac_on_dns": "Enabled",
+ "dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
+ "setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
+ "enable_nic": "Enabled", "nic_selection": "Dedicated",
+ "failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
+ "network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
+ "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
+ "static_dns_2": "staticdns2", "static_gateway": "staticgateway",
+ "static_net_mask": "staticnetmask"})
+ message = {"changes_applicable": True, "message": "changes found to commit!", "changed": True,
+ "Status": "Success"}
+ idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
+ assert msg == {'Status': 'Success',
+ 'changed': True,
+ 'changes_applicable': True,
+ 'message': 'changes found to commit!'}
+
+ def test_run_idrac_network_config_success_case03(self, idrac_connection_configure_network_mock, idrac_default_args,
+ idrac_file_manager_config_networking_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "register_idrac_on_dns": "Enabled",
+ "dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
+ "setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
+ "enable_nic": "Enabled", "nic_selection": "Dedicated",
+ "failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
+ "network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
+ "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
+ "static_dns_2": "staticdns2", "static_gateway": "staticgateway",
+ "static_net_mask": "staticnetmask"})
+ message = {"changes_applicable": False, "Message": "No changes found to commit!", "changed": False,
+ "Status": "Success"}
+ idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
+ assert msg == {'Message': 'No changes found to commit!',
+ 'Status': 'Success',
+ 'changed': False,
+ 'changes_applicable': False}
+
+ def test_run_idrac_network_config_success_case04(self, idrac_connection_configure_network_mock,
+ idrac_default_args, idrac_file_manager_config_networking_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "register_idrac_on_dns": "Enabled",
+ "dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
+ "setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
+ "enable_nic": "Enabled", "nic_selection": "Dedicated",
+ "failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
+ "network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
+ "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
+ "static_dns_2": "staticdns2", "static_gateway": "staticgateway",
+ "static_net_mask": "staticnetmask"})
+ message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
+ "Status": "Success"}
+ idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
+ assert msg == {'Message': 'No changes were applied',
+ 'Status': 'Success',
+ 'changed': False,
+ 'changes_applicable': False}
+
+ def test_run_idrac_network_config_success_case05(self, idrac_connection_configure_network_mock, idrac_default_args,
+ idrac_file_manager_config_networking_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "register_idrac_on_dns": None,
+ "dns_idrac_name": None, "auto_config": None, "static_dns": None,
+ "setup_idrac_nic_vlan": None, "vlan_id": None, "vlan_priority": None,
+ "enable_nic": None, "nic_selection": None,
+ "failover_network": None, "auto_detect": None, "auto_negotiation": None,
+ "network_speed": None, "duplex_mode": None, "nic_mtu": None,
+ "enable_dhcp": None, "ip_address": None, "enable_ipv4": None,
+ "dns_from_dhcp": None, "static_dns_1": None, "static_dns_2": None,
+ "static_gateway": None, "static_net_mask": None})
+ message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
+ "Status": "Success"}
+ idrac_connection_configure_network_mock.config_mgr.configure_dns.return_value = message
+ idrac_connection_configure_network_mock.config_mgr.configure_nic_vlan.return_value = message
+ idrac_connection_configure_network_mock.config_mgr.configure_network_settings.return_value = message
+ idrac_connection_configure_network_mock.config_mgr.configure_ipv4.return_value = message
+ idrac_connection_configure_network_mock.config_mgr.configure_static_ipv4.return_value = message
+ idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
+ assert msg == {'Message': 'No changes were applied',
+ 'Status': 'Success',
+ 'changed': False,
+ 'changes_applicable': False}
+
+ def test_run_idrac_network_config_failed_case01(self, idrac_connection_configure_network_mock, idrac_default_args,
+ idrac_file_manager_config_networking_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "register_idrac_on_dns": "Enabled",
+ "dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
+ "setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
+ "enable_nic": "Enabled", "nic_selection": "Dedicated",
+ "failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
+ "network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
+ "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
+ "static_dns_2": "staticdns2", "static_gateway": "staticgateway",
+ "static_net_mask": "staticnetmask"})
+ message = {'Status': 'Failed', "Data": {'Message': 'status failed in checking Data'}}
+ idrac_connection_configure_network_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
+ idrac_connection_configure_network_mock.config_mgr.set_liason_share.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ result = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
+ assert result == idrac_connection_configure_network_mock.config_mgr.is_change_applicable()
+
+ def test_run_idrac_network_config_failed_case02(self, idrac_connection_configure_network_mock,
+ idrac_default_args, idrac_file_manager_config_networking_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "register_idrac_on_dns": "Enabled",
+ "dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
+ "setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
+ "enable_nic": "Enabled", "nic_selection": "Dedicated",
+ "failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
+ "network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
+ "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
+ "static_dns_2": "staticdns2", "static_gateway": "staticgateway",
+ "static_net_mask": "staticnetmask"})
+ message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
+ "Status": "failed"}
+ idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
+ assert msg == {'Message': 'No changes were applied', 'Status': 'failed', 'changed': False,
+ 'changes_applicable': False}
+
+ def test_run_idrac_network_config_failed_case03(self, idrac_connection_configure_network_mock,
+ idrac_default_args, idrac_file_manager_config_networking_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "register_idrac_on_dns": "Enabled",
+ "dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
+ "setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
+ "enable_nic": "Enabled", "nic_selection": "Dedicated",
+ "failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
+ "network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
+ "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
+ "static_dns_2": "staticdns2", "static_gateway": "staticgateway",
+ "static_net_mask": "staticnetmask"})
+        message = {'Status': 'Failed', "Data": {'Message': "Failed to find changes"}}
+ idrac_connection_configure_network_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
+ idrac_connection_configure_network_mock.config_mgr.set_liason_share.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
+ assert msg == idrac_connection_configure_network_mock.config_mgr.is_change_applicable()
+
+ @pytest.mark.parametrize("exc_type", [RuntimeError, SSLValidationError, ConnectionError, KeyError,
+ ImportError, ValueError, TypeError, HTTPError, URLError])
+ def test_main_idrac_configure_network_exception_handling_case(self, exc_type, mocker, idrac_default_args,
+ idrac_connection_configure_network_mock,
+ idrac_file_manager_config_networking_mock):
+ idrac_default_args.update({"share_name": None})
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(
+ MODULE_PATH + 'idrac_network.run_idrac_network_config',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(
+ MODULE_PATH + 'idrac_network.run_idrac_network_config',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+        if exc_type != URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_os_deployment.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_os_deployment.py
new file mode 100644
index 00000000..d8967356
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_os_deployment.py
@@ -0,0 +1,166 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_os_deployment
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.utils import set_module_args, exit_json, \
+ fail_json, AnsibleFailJson, AnsibleExitJson
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.'
+
+
+class TestOsDeployment(FakeAnsibleModule):
+ module = idrac_os_deployment
+
+ @pytest.fixture
+ def idrac_connection_mock(self, mocker, idrac_mock):
+ idrac_connection_class_mock = mocker.patch(
+ MODULE_PATH + 'idrac_os_deployment.iDRACConnection')
+        # Only __enter__ needs to be wired because the module uses iDRACConnection as a context manager.
+ idrac_connection_class_mock.return_value.__enter__.return_value = idrac_mock
+ return idrac_connection_class_mock
+
+ @pytest.fixture
+ def idrac_mock(self, mocker):
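+        # Patch the omsdk sdkinfra factory used by the dellemc_idrac module_utils and return a mocked driver.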
+ sdkinfra_obj = mocker.patch(MODULE_UTIL_PATH + 'dellemc_idrac.sdkinfra')
+ obj = MagicMock()
+ sdkinfra_obj.get_driver.return_value = obj
+ return sdkinfra_obj
+
+ @pytest.fixture
+ def omsdk_mock(self, mocker):
+ mocker.patch(MODULE_UTIL_PATH + 'dellemc_idrac.UserCredentials')
+ mocker.patch(MODULE_UTIL_PATH + 'dellemc_idrac.WsManOptions')
+
+ @pytest.fixture
+ def fileonshare_mock(self, mocker):
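+        # Replace FileOnShare so that no real network share is contacted during the tests.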
+ share_mock = mocker.patch(MODULE_PATH + 'idrac_os_deployment.FileOnShare',
+ return_value=MagicMock())
+ return share_mock
+
+ @pytest.fixture
+ def minutes_to_cim_format_mock(self, mocker):
+ validate_device_inputs_mock = mocker.patch(
+ MODULE_PATH + 'idrac_os_deployment.minutes_to_cim_format')
+ validate_device_inputs_mock.return_value = "time"
+
+ @pytest.mark.parametrize("expose_duration_val", ["abc", None, "", 1.5, {"abc": 1}, [110, 210, 300], [120]])
+ def test_main_failure_case_01(self, expose_duration_val, idrac_default_args, module_mock):
+ """when invalid value for expose_durationis given """
+ idrac_default_args.update({"iso_image": "iso_image"})
+ idrac_default_args.update({"expose_duration": expose_duration_val})
+ result = self._run_module_with_fail_json(idrac_default_args)
+
+ def test_main_failure_case_02(self, module_mock, idrac_default_args):
+ """when required arg iso_image is not passed"""
+ idrac_default_args.update({"iso_image": "iso_image"})
+ result = self._run_module_with_fail_json(idrac_default_args)
+
+ def test_main_failure_case_03(self, module_mock, idrac_default_args):
+ """when invalid ansible option is given"""
+ idrac_default_args.update({"iso_image": "iso_image", "invalid_key": "val"})
+ result = self._run_module_with_fail_json(idrac_default_args)
+
+ def test_main_run_boot_to_network_iso_success_case01(self, idrac_connection_mock, idrac_mock, module_mock,
+ fileonshare_mock, omsdk_mock, minutes_to_cim_format_mock):
+ idrac_connection_mock.return_value.__enter__.return_value = idrac_mock
+ idrac_mock.config_mgr.boot_to_network_iso.return_value = {"Status": "Success"}
+ params = {"idrac_ip": "idrac_ip", "idrac_user": "idrac_user", "idrac_password": "idrac_password",
+ "ca_path": "/path/to/ca_cert.pem",
+ "share_name": "dummy_share_name", "share_password": "dummy_share_password",
+ "iso_image": "dummy_iso_image", "expose_duration": "100"
+ }
+ set_module_args(params)
+ result = self._run_module(params)
+ assert result == {'changed': True, 'boot_status': {'Status': 'Success'}}
+
+ def test_main_run_boot_to_network_iso_success_case02(self, idrac_connection_mock, idrac_mock, module_mock,
+ fileonshare_mock, omsdk_mock, minutes_to_cim_format_mock):
+ """share_name None case"""
+ idrac_connection_mock.return_value.__enter__.return_value = idrac_mock
+ idrac_mock.config_mgr.boot_to_network_iso.return_value = {"Status": "Success"}
+ params = {"idrac_ip": "idrac_ip", "idrac_user": "idrac_user", "idrac_password": "idrac_password",
+ "ca_path": "/path/to/ca_cert.pem",
+ "share_name": None, "share_password": "dummy_share_password",
+ "iso_image": "dummy_iso_image", "expose_duration": "100"
+ }
+ set_module_args(params)
+ result = self._run_module(params)
+ assert result == {'changed': True, 'boot_status': {'Status': 'Success'}}
+
+    def test_main_run_boot_to_network_iso_fileonshare_failure_case(self, idrac_connection_mock, idrac_mock, module_mock,
+ fileonshare_mock, omsdk_mock,
+ minutes_to_cim_format_mock):
+ idrac_connection_mock.return_value.__enter__.return_value = idrac_mock
+ fileonshare_mock.side_effect = RuntimeError("Error in Runtime")
+ params = {"idrac_ip": "idrac_ip", "idrac_user": "idrac_user", "idrac_password": "idrac_password",
+ "ca_path": "/path/to/ca_cert.pem",
+ "share_name": "invalid_share_name", "share_password": "dummy_share_password",
+ "iso_image": "dummy_iso_image", "expose_duration": "100"
+ }
+ set_module_args(params)
+ result = self._run_module_with_fail_json(params)
+ assert result == {'failed': True, 'msg': 'Error in Runtime'}
+
+ def test_main_run_boot_to_network_iso_failure_case(self, idrac_connection_mock, idrac_mock, module_mock,
+ fileonshare_mock, omsdk_mock, minutes_to_cim_format_mock):
+ idrac_mock.config_mgr.boot_to_network_iso.return_value = {"Status": "Failure"}
+ params = {"idrac_ip": "idrac_ip", "idrac_user": "idrac_user", "idrac_password": "idrac_password",
+ "ca_path": "/path/to/ca_cert.pem",
+ "share_name": "dummy_share_name", "share_password": "dummy_share_password",
+ "iso_image": "dummy_iso_image", "expose_duration": "100"
+ }
+ set_module_args(params)
+ result = self._run_module_with_fail_json(params)
+ assert result['failed'] is True
+
+ def test_minutes_to_cim_format_success_case_01(self, module_mock):
+ result = self.module.minutes_to_cim_format(module_mock, 180)
+ assert result == '00000000030000.000000:000'
+
+ def test_minutes_to_cim_format_success_case_02(self, module_mock):
+ result = self.module.minutes_to_cim_format(module_mock, 0)
+ assert result == '00000000000000.000000:000'
+
+ def test_minutes_to_cim_format_success_case_03(self, module_mock):
+ """when day>0 condition"""
+ result = self.module.minutes_to_cim_format(module_mock, 2880)
+ assert result == '00000002230000.000000:000'
+
+ def test_minutes_to_cim_format_failure_case(self):
+ fmodule = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ set_module_args({})
+ self.module.minutes_to_cim_format(fmodule, -1)
+ assert exc.value.args[0] == "Invalid value for ExposeDuration."
+
+ @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
+ def test_main_idrac_os_deployment_exception_handling_case(self, exc_type, mocker, idrac_connection_mock,
+ idrac_default_args, idrac_mock, fileonshare_mock,
+ omsdk_mock):
+ idrac_default_args.update({"iso_image": "iso_image", "share_name": "share_name"})
+ idrac_default_args.update({"expose_duration": 10})
+ mocker.patch(MODULE_PATH + 'idrac_os_deployment.run_boot_to_network_iso',
+ side_effect=exc_type('test'))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert 'msg' in result
+ assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py
new file mode 100644
index 00000000..99185a93
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py
@@ -0,0 +1,316 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 6.3.0
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_redfish_storage_controller
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ansible.module_utils.urls import urllib_error
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def redfish_str_controller_conn(mocker, redfish_response_mock):
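+    # Patch the Redfish client so that invoke_request on the context-managed connection returns the shared response mock.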
+ connection_class_mock = mocker.patch(
+ MODULE_PATH + 'idrac_redfish_storage_controller.Redfish')
+ idrac_redfish_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ idrac_redfish_connection_mock_obj.invoke_request.return_value = redfish_response_mock
+ return idrac_redfish_connection_mock_obj
+
+
+class TestIdracRedfishStorageController(FakeAnsibleModule):
+ module = idrac_redfish_storage_controller
+
+ def test_check_id_exists(self, redfish_str_controller_conn, redfish_response_mock):
+ param = {"baseuri": "192.168.0.1", "username": "username", "password": "password"}
+ uri = "/redfish/v1/Dell/Systems/{system_id}/Storage/DellController/{controller_id}"
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.status_code = 200
+ result = self.module.check_id_exists(f_module, redfish_str_controller_conn, "controller_id",
+ "RAID.Integrated.1-1", uri)
+ assert result is None
+ redfish_response_mock.success = False
+ redfish_response_mock.status_code = 400
+ with pytest.raises(Exception) as ex:
+ self.module.check_id_exists(f_module, redfish_str_controller_conn, "controller_id",
+ "RAID.Integrated.1-1", uri)
+ assert ex.value.args[0] == "controller_id with id 'RAID.Integrated.1-1' not found in system"
+
+ def test_validate_inputs(self, redfish_str_controller_conn, redfish_response_mock):
+ param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ "command": "ReKey", "mode": "LKM"}
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as ex:
+ self.module.validate_inputs(f_module)
+ assert ex.value.args[0] == "All of the following: key, key_id and old_key are required for 'ReKey' operation."
+ param.update({"command": "AssignSpare", "target": ["Disk.Bay.0:Enclosure.Internal.0-2:RAID.Integrated.1-1",
+ "Disk.Bay.1:Enclosure.Internal.0-2:RAID.Integrated.1-1"]})
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as ex:
+ self.module.validate_inputs(f_module)
+ assert ex.value.args[0] == "The Fully Qualified Device Descriptor (FQDD) of the target " \
+ "physical disk must be only one."
+ param.update({"volume_id": ["Disk.Virtual.0:RAID.Mezzanine.1C-0",
+ "Disk.Virtual.0:RAID.Mezzanine.1C-1"], "target": None})
+ with pytest.raises(Exception) as ex:
+ self.module.validate_inputs(f_module)
+ assert ex.value.args[0] == "The Fully Qualified Device Descriptor (FQDD) of the target " \
+ "virtual drive must be only one."
+ param.update({"command": "EnableControllerEncryption"})
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as ex:
+ self.module.validate_inputs(f_module)
+ assert ex.value.args[0] == "All of the following: key, key_id are " \
+ "required for 'EnableControllerEncryption' operation."
+ param.update({"command": "ChangePDStateToOnline",
+ "target": ["Disk.Bay.0:Enclosure.Internal.0-2:RAID.Integrated.1-1",
+ "Disk.Bay.0:Enclosure.Internal.0-2:RAID.Integrated.1-1"]})
+ with pytest.raises(Exception) as ex:
+ self.module.validate_inputs(f_module)
+ assert ex.value.args[0] == "The Fully Qualified Device Descriptor (FQDD) of the target " \
+ "physical disk must be only one."
+
+ def test_target_identify_pattern(self, redfish_str_controller_conn, redfish_response_mock):
+ param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ "command": "BlinkTarget", "target": "Disk.Bay.1:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1",
+ "volume_id": "Disk.Virtual.0:RAID.Mezzanine.1C-1"}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.status_code = 200
+ result = self.module.target_identify_pattern(f_module, redfish_str_controller_conn)
+ assert result.status_code == 200
+ f_module.check_mode = True
+ with pytest.raises(Exception) as ex:
+ self.module.target_identify_pattern(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Changes found to be applied."
+
+ def test_ctrl_reset_config(self, redfish_str_controller_conn, redfish_response_mock, mocker):
+ param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ "controller_id": "RAID.Mezzanine.1C-1", "command": "ResetConfig"}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "idrac_redfish_storage_controller.check_id_exists", return_value=None)
+        redfish_response_mock.json_data = {"Members": ["virtual_drive"]}
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+ result = self.module.ctrl_reset_config(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+ f_module.check_mode = True
+ with pytest.raises(Exception) as ex:
+ self.module.ctrl_reset_config(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Changes found to be applied."
+ redfish_response_mock.json_data = {"Members": []}
+ with pytest.raises(Exception) as ex:
+ self.module.ctrl_reset_config(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "No changes found to be applied."
+
+ def test_hot_spare_config(self, redfish_str_controller_conn, redfish_response_mock):
+ param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ "command": "AssignSpare", "target": "Disk.Bay.1:Enclosure.Internal.0-2:RAID.Integrated.1-1"}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.json_data = {"HotspareType": "None"}
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+ result = self.module.hot_spare_config(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+ f_module.check_mode = True
+ with pytest.raises(Exception) as ex:
+ self.module.hot_spare_config(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Changes found to be applied."
+ redfish_response_mock.json_data = {"HotspareType": "Global"}
+ with pytest.raises(Exception) as ex:
+ self.module.hot_spare_config(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "No changes found to be applied."
+
+ def test_ctrl_key(self, redfish_str_controller_conn, redfish_response_mock, mocker):
+ param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ "command": "SetControllerKey", "controller_id": "RAID.Integrated.1-1", "mode": "LKM"}
+ mocker.patch(MODULE_PATH + "idrac_redfish_storage_controller.check_id_exists", return_value=None)
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.json_data = {"SecurityStatus": "EncryptionNotCapable", "KeyID": None}
+ with pytest.raises(Exception) as ex:
+ self.module.ctrl_key(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "The storage controller 'RAID.Integrated.1-1' does not support encryption."
+ f_module.check_mode = True
+ redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": None}
+ with pytest.raises(Exception) as ex:
+ self.module.ctrl_key(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Changes found to be applied."
+ redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": "Key@123"}
+ with pytest.raises(Exception) as ex:
+ self.module.ctrl_key(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "No changes found to be applied."
+ f_module = self.get_module_mock(params=param)
+ f_module.check_mode = True
+ param.update({"command": "ReKey"})
+ with pytest.raises(Exception) as ex:
+ self.module.ctrl_key(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Changes found to be applied."
+ param.update({"command": "RemoveControllerKey"})
+ f_module = self.get_module_mock(params=param)
+ f_module.check_mode = True
+ with pytest.raises(Exception) as ex:
+ self.module.ctrl_key(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Changes found to be applied."
+ redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": None}
+ with pytest.raises(Exception) as ex:
+ self.module.ctrl_key(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "No changes found to be applied."
+ param.update({"command": "EnableControllerEncryption"})
+ f_module = self.get_module_mock(params=param)
+ f_module.check_mode = True
+ with pytest.raises(Exception) as ex:
+ self.module.ctrl_key(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Changes found to be applied."
+ redfish_response_mock.json_data = {"SecurityStatus": "SecurityKeyAssigned", "KeyID": None}
+ with pytest.raises(Exception) as ex:
+ self.module.ctrl_key(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "No changes found to be applied."
+ f_module.check_mode = False
+ redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": None}
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+ result = self.module.ctrl_key(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+
+ def test_convert_raid_status(self, redfish_str_controller_conn, redfish_response_mock):
+ param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ "command": "ConvertToRAID", "target": ["Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1",
+ "Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1"]}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.json_data = {"Oem": {"Dell": {"DellPhysicalDisk": {"RaidStatus": "NonRAID"}}}}
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+ result = self.module.convert_raid_status(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+ f_module.check_mode = True
+ with pytest.raises(Exception) as ex:
+ self.module.convert_raid_status(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Changes found to be applied."
+ f_module.check_mode = False
+ redfish_response_mock.json_data = {"Oem": {"Dell": {"DellPhysicalDisk": {"RaidStatus": "Ready"}}}}
+ with pytest.raises(Exception) as ex:
+ self.module.convert_raid_status(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "No changes found to be applied."
+
+ def test_change_pd_status(self, redfish_str_controller_conn, redfish_response_mock):
+ param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ "command": "ChangePDStateToOnline",
+ "target": ["Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1",
+ "Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1"]}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.json_data = {"Oem": {"Dell": {"DellPhysicalDisk": {"RaidStatus": "NonRAID"}}}}
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+ result = self.module.change_pd_status(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+ f_module.check_mode = True
+ with pytest.raises(Exception) as ex:
+ self.module.change_pd_status(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Changes found to be applied."
+ f_module.check_mode = False
+ redfish_response_mock.json_data = {"Oem": {"Dell": {"DellPhysicalDisk": {"RaidStatus": "Online"}}}}
+ with pytest.raises(Exception) as ex:
+ self.module.change_pd_status(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "No changes found to be applied."
+
+ def test_lock_virtual_disk(self, redfish_str_controller_conn, redfish_response_mock, mocker):
+ param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ "command": "LockVirtualDisk",
+ "volume_id": "Disk.Virtual.0:RAID.SL.3-1"}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "idrac_redfish_storage_controller.check_id_exists", return_value=None)
+ redfish_response_mock.json_data = {"Oem": {"Dell": {"DellVolume": {"LockStatus": "Unlocked"}}}}
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+ result = self.module.lock_virtual_disk(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+ f_module.check_mode = True
+ with pytest.raises(Exception) as ex:
+ self.module.lock_virtual_disk(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Changes found to be applied."
+ f_module.check_mode = False
+ redfish_response_mock.json_data = {"Oem": {"Dell": {"DellVolume": {"LockStatus": "Locked"}}}}
+ with pytest.raises(Exception) as ex:
+ self.module.lock_virtual_disk(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "No changes found to be applied."
+
+ @pytest.mark.parametrize("exc_type", [RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError,
+ ImportError, ValueError, TypeError])
+ def test_main_error(self, redfish_str_controller_conn, redfish_response_mock, mocker,
+ exc_type, redfish_default_args):
+ param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ "command": "ResetConfig", "controller_id": "RAID.Integrated.1-1"}
+ redfish_default_args.update(param)
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.validate_inputs', return_value=None)
+ redfish_response_mock.success = False
+ redfish_response_mock.status_code = 400
+ json_str = to_text(json.dumps({"data": "out"}))
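+ # URLError should be reported as unreachable, SSLValidationError is raised with an HTTPError-style
+ # (url, code, msg, headers, body) signature, and every other parametrized exception should fail the module.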
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.ctrl_reset_config',
+ side_effect=exc_type("url open error"))
+ result = self._run_module(redfish_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.ctrl_reset_config',
+ side_effect=exc_type('exception message'))
+ result = self._run_module_with_fail_json(redfish_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.ctrl_reset_config',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(redfish_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
+
+ def test_main_success(self, redfish_str_controller_conn, redfish_response_mock, redfish_default_args, mocker):
+ param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ "command": "SetControllerKey", "key": "Key@123", "key_id": "keyid@123",
+ "controller_id": "RAID.Integrated.1-1",
+ "target": ["Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1"]}
+ redfish_default_args.update(param)
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.validate_inputs', return_value=None)
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.ctrl_key',
+ return_value=("", "", "JID_XXXXXXXXXXXXX"))
+ result = self._run_module(redfish_default_args)
+ assert result["task"]["id"] == "JID_XXXXXXXXXXXXX"
+ param.update({"command": "AssignSpare"})
+ redfish_default_args.update(param)
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.hot_spare_config',
+ return_value=("", "", "JID_XXXXXXXXXXXXX"))
+ result = self._run_module(redfish_default_args)
+ assert result["task"]["id"] == "JID_XXXXXXXXXXXXX"
+ param.update({"command": "BlinkTarget"})
+ redfish_default_args.update(param)
+ redfish_response_mock.status_code = 200
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.target_identify_pattern',
+ return_value=redfish_response_mock)
+ result = self._run_module(redfish_default_args)
+ assert result["msg"] == "Successfully performed the 'BlinkTarget' operation."
+ param.update({"command": "ConvertToRAID"})
+ redfish_default_args.update(param)
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.convert_raid_status',
+ return_value=("", "", "JID_XXXXXXXXXXXXX"))
+ result = self._run_module(redfish_default_args)
+ assert result["task"]["id"] == "JID_XXXXXXXXXXXXX"
+ param.update({"command": "ChangePDStateToOnline", "job_wait": True})
+ redfish_default_args.update(param)
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.change_pd_status',
+ return_value=("", "", "JID_XXXXXXXXXXXXX"))
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.wait_for_job_completion',
+ return_value=(redfish_response_mock, ""))
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.strip_substr_dict',
+ return_value={"JobState": "Failed"})
+ result = self._run_module(redfish_default_args)
+ assert result["task"]["id"] == "JID_XXXXXXXXXXXXX"
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py
new file mode 100644
index 00000000..3f4ca497
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py
@@ -0,0 +1,95 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_reset
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from mock import MagicMock, patch, Mock
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def idrac_reset_connection_mock(mocker, idrac_mock):
+ idrac_connection_class_mock = mocker.patch(MODULE_PATH + 'idrac_reset.iDRACConnection')
+ idrac_connection_class_mock.return_value.__enter__.return_value = idrac_mock
+ return idrac_mock
+
+
+class TestReset(FakeAnsibleModule):
+ module = idrac_reset
+
+ @pytest.fixture
+ def idrac_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.config_mgr = idrac_obj
+ type(idrac_obj).reset_idrac = Mock(return_value="idracreset")
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_config_mngr_reset_mock(self, mocker):
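+ # mocker.patch raises AttributeError when idrac_reset exposes no config_mgr attribute; fall back to a plain MagicMock.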
+ try:
+ config_manager_obj = mocker.patch(MODULE_PATH + 'idrac_reset.config_mgr')
+ except AttributeError:
+ config_manager_obj = MagicMock()
+ obj = MagicMock()
+ config_manager_obj.config_mgr.return_value = obj
+ config_manager_obj.config_mgr.reset_idrac().return_value = obj
+ return config_manager_obj
+
+ def test_main_idrac_reset_success_case01(self, idrac_reset_connection_mock, idrac_default_args, mocker):
+ mocker.patch(MODULE_PATH + "idrac_reset.run_idrac_reset",
+ return_value=({"Status": "Success"}, False))
+ idrac_reset_connection_mock.config_mgr.reset_idrac.return_value = {"Status": "Success"}
+ idrac_reset_connection_mock.config_mgr.reset_idrac.return_value = "Success"
+ result = self._run_module(idrac_default_args)
+ assert result == {'msg': 'Successfully performed iDRAC reset.',
+ 'reset_status': ({'Status': 'Success'}, False), 'changed': False}
+
+ def test_run_idrac_reset_success_case01(self, idrac_reset_connection_mock, idrac_default_args):
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.run_idrac_reset(idrac_reset_connection_mock, f_module)
+ assert result == idrac_reset_connection_mock.config_mgr.reset_idrac()
+
+ def test_run_idrac_reset_status_success_case02(self, idrac_reset_connection_mock, idrac_default_args):
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ result = self.module.run_idrac_reset(idrac_reset_connection_mock, f_module)
+ assert result == {'Message': 'Changes found to commit!', 'Status': 'Success', 'changes_applicable': True}
+
+ @pytest.mark.parametrize("exc_type", [SSLValidationError, URLError, ValueError, TypeError,
+ ConnectionError, HTTPError])
+ def test_main_exception_handling_case(self, exc_type, mocker, idrac_reset_connection_mock, idrac_default_args):
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'idrac_reset.run_idrac_reset', side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH + 'idrac_reset.run_idrac_reset',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if not exc_type == URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_server_config_profile.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_server_config_profile.py
new file mode 100644
index 00000000..16d5b030
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_server_config_profile.py
@@ -0,0 +1,356 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.4.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_server_config_profile
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants,\
+ AnsibleExitJson
+from mock import MagicMock, patch, Mock, mock_open
+from pytest import importorskip
+from ansible.module_utils.six.moves.urllib.parse import urlparse, ParseResult
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+
+class TestServerConfigProfile(FakeAnsibleModule):
+ module = idrac_server_config_profile
+
+ @pytest.fixture
+ def idrac_server_configure_profile_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.file_share_manager = idrac_obj
+ omsdk_mock.config_mgr = idrac_obj
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_file_manager_server_config_profile_mock(self, mocker):
+ try:
+ file_manager_obj = mocker.patch(
+ MODULE_PATH + 'idrac_server_config_profile.file_share_manager')
+ except AttributeError:
+ file_manager_obj = MagicMock()
+ obj = MagicMock()
+ file_manager_obj.create_share_obj.return_value = obj
+ return file_manager_obj
+
+ @pytest.fixture
+ def idrac_scp_redfish_mock(self, mocker, idrac_server_configure_profile_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH + 'idrac_server_config_profile.iDRACRedfishAPI',
+ return_value=idrac_server_configure_profile_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_server_configure_profile_mock
+ return idrac_server_configure_profile_mock
+
+ def test_run_export_import_http(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "192.168.0.1:/share", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "export",
+ "job_wait": True, "scp_components": "IDRAC",
+ "scp_file": "scp_file.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful", "export_format": "XML", "export_use": "Default"})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ export_response = {"msg": "Successfully exported the Server Configuration Profile.",
+ "scp_status": {"Name": "Export: Server Configuration Profile", "PercentComplete": 100,
+ "TaskState": "Completed", "TaskStatus": "OK", "Id": "JID_236654661194"}}
+ mocker.patch(MODULE_PATH + "idrac_server_config_profile.urlparse",
+ return_value=ParseResult(scheme='http', netloc='192.168.0.1',
+ path='/share/',
+ params='', query='', fragment=''))
+ mocker.patch(MODULE_PATH + "idrac_server_config_profile.response_format_change",
+ return_value=export_response)
+ result = self.module.run_export_import_scp_http(idrac_scp_redfish_mock, f_module)
+ assert result["msg"] == "Successfully exported the Server Configuration Profile."
+ idrac_default_args.update({"command": "import"})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ import_response = {"msg": "Successfully imported the Server Configuration Profile.",
+ "scp_status": {"Name": "Import: Server Configuration Profile", "PercentComplete": 100,
+ "TaskState": "Completed", "TaskStatus": "OK", "Id": "JID_236654661194"}}
+ mocker.patch(MODULE_PATH + "idrac_server_config_profile.response_format_change",
+ return_value=import_response)
+ result = self.module.run_export_import_scp_http(idrac_scp_redfish_mock, f_module)
+ assert result["msg"] == "Successfully imported the Server Configuration Profile."
+
+ def test_http_share_msg_main(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "http://192.168.0.1:/share", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "import",
+ "job_wait": False, "scp_components": "IDRAC",
+ "scp_file": "scp_file.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful", "export_format": "XML",
+ "export_use": "Default", "validate_certs": False})
+ share_return = {"Oem": {"Dell": {"MessageId": "SYS069"}}}
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_import_scp_http',
+ return_value=share_return)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Successfully triggered the job to import the Server Configuration Profile."
+ share_return = {"Oem": {"Dell": {"MessageId": "SYS053"}}}
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_import_scp_http',
+ return_value=share_return)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Successfully triggered the job to import the Server Configuration Profile."
+ idrac_default_args.update({"command": "export"})
+ share_return = {"Oem": {"Dell": {"MessageId": "SYS043"}}}
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_import_scp_http',
+ return_value=share_return)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Successfully triggered the job to export the Server Configuration Profile."
+
+ def test_export_scp_redfish(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "192.168.0.1:/share", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "import",
+ "job_wait": False, "scp_components": "IDRAC",
+ "scp_file": "scp_file.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful", "export_format": "XML",
+ "export_use": "Default", "validate_certs": False})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ share_return = {"Oem": {"Dell": {"MessageId": "SYS069"}}}
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_import_scp_http',
+ return_value=share_return)
+ f_module.check_mode = False
+ result = self.module.export_scp_redfish(f_module, idrac_scp_redfish_mock)
+ assert result["file"] == "192.168.0.1:/share/scp_file.xml"
+ idrac_default_args.update({"share_name": "\\\\100.96.16.123\\cifsshare"})
+ result = self.module.export_scp_redfish(f_module, idrac_scp_redfish_mock)
+ assert result["file"] == "\\\\100.96.16.123\\cifsshare\\scp_file.xml"
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.response_format_change',
+ return_value={"TaskStatus": "Critical"})
+ with pytest.raises(Exception) as ex:
+ self.module.export_scp_redfish(f_module, idrac_scp_redfish_mock)
+ assert ex.value.args[0] == "Failed to import scp."
+
+ def test_response_format_change(self, idrac_scp_redfish_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": "192.168.0.1:/share", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "import",
+ "job_wait": True, "scp_components": "IDRAC",
+ "scp_file": "scp_file.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful", "export_format": "XML",
+ "export_use": "Default", "validate_certs": False})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ idrac_scp_redfish_mock.json_data = {"Oem": {"Dell": {"key": "value"}}}
+ result = self.module.response_format_change(idrac_scp_redfish_mock, f_module, "export_scp.yml")
+ assert result["key"] == "value"
+ idrac_default_args.update({"command": "export"})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.response_format_change(idrac_scp_redfish_mock, f_module, "export_scp.yml")
+ assert result["key"] == "value"
+
+ def test_preview_scp_redfish(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "192.168.0.1:/nfsshare", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "preview", "job_wait": True,
+ "scp_components": "IDRAC", "scp_file": "scp_file.xml",
+ "end_host_power_state": "On", "shutdown_type": "Graceful", "export_format": "XML",
+ "export_use": "Default", "validate_certs": False, "idrac_port": 443})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ share = {"share_ip": "192.168.0.1", "share_user": "sharename", "share_password": "password",
+ "job_wait": True}
+ f_module.check_mode = False
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.get_scp_share_details',
+ return_value=(share, "scp_file.xml"))
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.response_format_change',
+ return_value={"Status": "Success"})
+ result = self.module.preview_scp_redfish(f_module, idrac_scp_redfish_mock, True, import_job_wait=False)
+ assert result["Status"] == "Success"
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.response_format_change',
+ return_value={"TaskStatus": "Critical"})
+ with pytest.raises(Exception) as ex:
+ self.module.import_scp_redfish(f_module, idrac_scp_redfish_mock, True)
+ assert ex.value.args[0] == "Failed to preview scp."
+ idrac_default_args.update({"share_name": "192.168.0.1:/nfsshare", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "preview", "job_wait": True,
+ "scp_components": "IDRAC", "scp_file": "scp_file.xml",
+ "end_host_power_state": "On", "shutdown_type": "Graceful", "export_format": "XML",
+ "export_use": "Default", "validate_certs": False, "idrac_port": 443})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ share = {"share_ip": "192.168.0.1", "share_user": "sharename", "share_password": "password",
+ "job_wait": True, "share_type": "LOCAL", "share_name": "share_name"}
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.get_scp_share_details',
+ return_value=(share, "scp_file.xml"))
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.exists',
+ return_value=False)
+ with pytest.raises(Exception) as ex:
+ self.module.import_scp_redfish(f_module, idrac_scp_redfish_mock, False)
+ assert ex.value.args[0] == "Invalid file path provided."
+
+ def test_import_scp_redfish(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "192.168.0.1:/share", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "import",
+ "job_wait": True, "scp_components": "IDRAC",
+ "scp_file": "scp_file.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful", "export_format": "XML",
+ "export_use": "Default", "validate_certs": False, "idrac_port": 443})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = True
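+ # In check mode, a preview result carrying message ID SYS081 indicates pending changes, so the import
+ # is expected to exit with "Changes found to be applied."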
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.preview_scp_redfish',
+ return_value={"MessageId": "SYS081"})
+ with pytest.raises(Exception) as ex:
+ self.module.import_scp_redfish(f_module, idrac_scp_redfish_mock, True)
+ assert ex.value.args[0] == "Changes found to be applied."
+ idrac_default_args.update({"share_name": "http://192.168.0.1/http-share", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "import",
+ "job_wait": True, "scp_components": "IDRAC",
+ "scp_file": "scp_file.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful", "export_format": "XML",
+ "export_use": "Default", "validate_certs": False, "idrac_port": 443})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.response_format_change',
+ return_value={"Status": "Success"})
+ result = self.module.import_scp_redfish(f_module, idrac_scp_redfish_mock, True)
+ assert result["Status"] == "Success"
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.response_format_change',
+ return_value={"TaskStatus": "Critical"})
+ with pytest.raises(Exception) as ex:
+ self.module.import_scp_redfish(f_module, idrac_scp_redfish_mock, True)
+ assert ex.value.args[0] == "Failed to import scp."
+ idrac_default_args.update({"share_name": "local-share", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "import",
+ "job_wait": True, "scp_components": "IDRAC",
+ "scp_file": "scp_file.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful", "export_format": "XML",
+ "export_use": "Default", "validate_certs": False, "idrac_port": 443})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ share = {"share_ip": "192.168.0.1", "share_user": "sharename", "share_password": "password",
+ "job_wait": True, "share_type": "LOCAL", "share_name": "share_name"}
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.get_scp_share_details',
+ return_value=(share, "scp_file.xml"))
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.exists',
+ return_value=False)
+ with pytest.raises(Exception) as ex:
+ self.module.import_scp_redfish(f_module, idrac_scp_redfish_mock, False)
+ assert ex.value.args[0] == "Invalid file path provided."
+
+ def test_get_scp_file_format(self, idrac_scp_redfish_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": "192.168.0.1:/share", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "import",
+ "job_wait": True, "scp_components": "IDRAC",
+ "scp_file": "scp_file.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful", "export_format": "XML",
+ "export_use": "Default", "validate_certs": False, "idrac_port": 443})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.get_scp_file_format(f_module)
+ assert result == "scp_file.xml"
+ idrac_default_args.update({"scp_file": None})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module.get_scp_file_format(f_module)
+ assert result.startswith("idrac_ip_") is True
+
+ def test_main_success_case(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "http://192.168.0.1/http-share", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "import",
+ "job_wait": True, "scp_components": "IDRAC",
+ "scp_file": "scp_file.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful", "export_format": "XML",
+ "export_use": "Default", "validate_certs": False, "idrac_port": 443})
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_import_scp_http',
+ return_value={"MessageId": "SYS069"})
+ result = self._run_module(idrac_default_args)
+ assert result["scp_status"] == {'MessageId': 'SYS069'}
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_import_scp_http',
+ return_value={"MessageId": "SYS053"})
+ result = self._run_module(idrac_default_args)
+ assert result["scp_status"] == {'MessageId': 'SYS053'}
+ idrac_default_args.update({"share_name": "192.168.0.1:/nfsshare"})
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.import_scp_redfish',
+ return_value={"Message": "No changes were applied since the current component configuration "
+ "matched the requested configuration"})
+ result = self._run_module(idrac_default_args)
+ assert result["changed"] is False
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.import_scp_redfish',
+ return_value={"MessageId": "SYS043"})
+ result = self._run_module(idrac_default_args)
+ assert result["scp_status"] == {'MessageId': 'SYS043'}
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.import_scp_redfish',
+ return_value={"MessageId": "SYS069"})
+ result = self._run_module(idrac_default_args)
+ assert result["scp_status"] == {'MessageId': 'SYS069'}
+ idrac_default_args.update({"command": "export"})
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.export_scp_redfish',
+ return_value={"Status": "Success"})
+ result = self._run_module(idrac_default_args)
+ assert result["scp_status"] == {'Status': 'Success'}
+ idrac_default_args.update({"command": "preview"})
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.preview_scp_redfish',
+ return_value={"MessageId": "SYS081"})
+ result = self._run_module(idrac_default_args)
+ assert result["scp_status"] == {"MessageId": "SYS081"}
+
+ def test_get_scp_share_details(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "/local-share", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "export",
+ "job_wait": True, "scp_components": "IDRAC",
+ "scp_file": "scp_file.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful", "export_format": "XML",
+ "export_use": "Default", "validate_certs": False, "idrac_port": 443})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.get_scp_file_format',
+ return_value="export_scp.xml")
+ result = self.module.get_scp_share_details(f_module)
+ assert result[1] == "export_scp.xml"
+
+ def test_wait_for_response(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "/local-share", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "export",
+ "job_wait": False, "scp_components": "IDRAC",
+ "scp_file": "scp_file.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful", "export_format": "XML",
+ "export_use": "Default", "validate_certs": False, "idrac_port": 443})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ idrac_scp_redfish_mock.headers = {"Location": "/redfish/v1/TaskService/Tasks/JID_123456789"}
+ resp_return_value = {"return_data": b"<SystemConfiguration Model='PowerEdge MX840c'>"
+ b"<Component FQDD='System.Embedded.1'>"
+ b"<Attribute Name='Backplane.1#BackplaneSplitMode'>0</Attribute>"
+ b"</Component> </SystemConfiguration>",
+ "return_job": {"JobState": "Completed", "JobType": "ExportConfiguration",
+ "PercentComplete": 100, "Status": "Success"}}
+ idrac_scp_redfish_mock.wait_for_job_complete.return_value = resp_return_value["return_data"]
+ idrac_scp_redfish_mock.job_resp = resp_return_value["return_job"]
+ share = {"share_name": "/local_share", "file_name": "export_file.xml"}
+ if sys.version_info.major == 3:
+ builtin_module_name = 'builtins'
+ else:
+ builtin_module_name = '__builtin__'
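+ # Patch the builtin open() (Python 2/3 aware) so the exported SCP data is written to an in-memory mock instead of disk.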
+ with patch("{0}.open".format(builtin_module_name), mock_open(read_data=resp_return_value["return_data"])) as mock_file:
+ result = self.module.wait_for_response(idrac_scp_redfish_mock, f_module, share, idrac_scp_redfish_mock)
+ assert result.job_resp == resp_return_value["return_job"]
+
+ def test_wait_for_response_json(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "/local-share", "share_user": "sharename",
+ "share_password": "sharepswd", "command": "export",
+ "job_wait": False, "scp_components": "IDRAC",
+ "scp_file": "scp_file.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful", "export_format": "JSON",
+ "export_use": "Default", "validate_certs": False, "idrac_port": 443})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ resp_return_value = {"return_data": {
+ "SystemConfiguration": {"Components": [
+ {"FQDD": "SupportAssist.Embedded.1",
+ "Attributes": [{"Name": "SupportAssist.1#SupportAssistEULAAccepted"}]
+ }]}
+ },
+ "return_job": {"JobState": "Completed", "JobType": "ExportConfiguration",
+ "PercentComplete": 100, "Status": "Success"}}
+ mock_scp_json_data = idrac_scp_redfish_mock
+ mock_scp_json_data.json_data = resp_return_value["return_data"]
+ idrac_scp_redfish_mock.wait_for_job_complete.return_value = mock_scp_json_data
+ idrac_scp_redfish_mock.job_resp = resp_return_value["return_job"]
+ share = {"share_name": "/local_share", "file_name": "export_file.xml"}
+ if sys.version_info.major == 3:
+ builtin_module_name = 'builtins'
+ else:
+ builtin_module_name = '__builtin__'
+ with patch("{0}.open".format(builtin_module_name), mock_open(read_data=str(resp_return_value["return_data"]))) as mock_file:
+ result = self.module.wait_for_response(idrac_scp_redfish_mock, f_module, share, idrac_scp_redfish_mock)
+ assert result.job_resp == resp_return_value["return_job"]
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_syslog.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_syslog.py
new file mode 100644
index 00000000..ae89c280
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_syslog.py
@@ -0,0 +1,197 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2018-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_syslog
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock
+from io import StringIO
+from ansible.module_utils._text import to_text
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestSetupSyslog(FakeAnsibleModule):
+ module = idrac_syslog
+
+ @pytest.fixture
+ def idrac_setup_syslog_mock(self):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.file_share_manager = idrac_obj
+ omsdk_mock.config_mgr = idrac_obj
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_file_manager_mock(self, mocker):
+ try:
+ file_manager_obj = mocker.patch(
+ MODULE_PATH + 'idrac_syslog.file_share_manager')
+ except AttributeError:
+ file_manager_obj = MagicMock()
+ obj = MagicMock()
+ file_manager_obj.create_share_obj.return_value = obj
+ return file_manager_obj
+
+ @pytest.fixture
+ def idrac_connection_setup_syslog_mock(self, mocker, idrac_setup_syslog_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH +
+ 'idrac_syslog.iDRACConnection', return_value=idrac_setup_syslog_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_setup_syslog_mock
+ return idrac_setup_syslog_mock
+
+ def test_main_setup_syslog_success_case01(self, idrac_connection_setup_syslog_mock, idrac_default_args, mocker,
+ idrac_file_manager_mock):
+ idrac_default_args.update({"share_name": "sharename", 'share_password': None, "syslog": "Enabled",
+ 'share_mnt': None, 'share_user': None})
+ message = {'changed': False, 'msg': {'Status': "Success", "message": "No changes found to commit!"}}
+ mocker.patch(MODULE_PATH +
+ 'idrac_syslog.run_setup_idrac_syslog',
+ return_value=message)
+ result = self._run_module(idrac_default_args)
+ assert result == {'msg': 'Successfully fetch the syslogs.',
+ 'syslog_status': {
+ 'changed': False,
+ 'msg': {'Status': 'Success', 'message': 'No changes found to commit!'}},
+ 'changed': False}
+
+ def test_run_setup_idrac_syslog_success_case01(self, idrac_connection_setup_syslog_mock, idrac_default_args,
+ idrac_file_manager_mock):
+ idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
+ "syslog": "Enabled", "share_password": "sharepassword"})
+ message = {"changes_applicable": True, "message": "changes are applicable"}
+ idrac_connection_setup_syslog_mock.config_mgr.is_change_applicable.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ msg = self.module.run_setup_idrac_syslog(idrac_connection_setup_syslog_mock, f_module)
+ assert msg == {'changes_applicable': True, 'message': 'changes are applicable'}
+
+ def test_run_setup_idrac_syslog_success_case02(self, idrac_connection_setup_syslog_mock, idrac_default_args,
+ idrac_file_manager_mock):
+ idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
+ "syslog": "Enabled", "share_password": "sharepassword"})
+ message = {"changes_applicable": True, "message": "changes found to commit!", "changed": True,
+ "Status": "Success"}
+ idrac_connection_setup_syslog_mock.config_mgr.enable_syslog.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_setup_idrac_syslog(idrac_connection_setup_syslog_mock, f_module)
+ assert msg == {'Status': 'Success',
+ 'changed': True,
+ 'changes_applicable': True,
+ 'message': 'changes found to commit!'}
+
+ def test_run_setup_idrac_syslog_success_case03(self, idrac_connection_setup_syslog_mock, idrac_default_args,
+ idrac_file_manager_mock):
+ idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
+ "syslog": "Enabled", "share_password": "sharepassword"})
+ message = {"changes_applicable": True, "Message": "No changes found to commit!", "changed": False,
+ "Status": "Success"}
+ idrac_connection_setup_syslog_mock.config_mgr.enable_syslog.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_setup_idrac_syslog(idrac_connection_setup_syslog_mock, f_module)
+ assert msg == {'Message': 'No changes found to commit!',
+ 'Status': 'Success',
+ 'changed': False,
+ 'changes_applicable': True}
+
+ def test_run_setup_idrac_syslog_success_case04(self, idrac_connection_setup_syslog_mock, idrac_default_args,
+ idrac_file_manager_mock):
+ idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
+ "syslog": "Disabled", "share_password": "sharepassword"})
+ message = {"changes_applicable": True, "Message": "No Changes found to commit!", "changed": False,
+ "Status": "Success"}
+ idrac_connection_setup_syslog_mock.config_mgr.disable_syslog.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_setup_idrac_syslog(idrac_connection_setup_syslog_mock, f_module)
+ assert msg == {'Message': 'No Changes found to commit!', 'Status': 'Success',
+ 'changed': False, 'changes_applicable': True}
+
+ def test_run_setup_syslog_disable_case(self, idrac_connection_setup_syslog_mock, idrac_default_args,
+ idrac_file_manager_mock):
+ idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
+ "share_password": "sharepassword", "syslog": 'Disabled'})
+ message = "Disabled"
+ idrac_connection_setup_syslog_mock.config_mgr.disable_syslog.return_value = message
+ idrac_connection_setup_syslog_mock.config_mgr.is_change_applicable.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ msg = self.module.run_setup_idrac_syslog(idrac_connection_setup_syslog_mock, f_module)
+ assert msg == 'Disabled'
+
+ def test_run_setup_syslog_enable_case(self, idrac_connection_setup_syslog_mock, idrac_default_args,
+ idrac_file_manager_mock):
+ idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
+ "share_password": "sharepassword", "syslog": 'Enabled'})
+ message = "Enabled"
+ idrac_connection_setup_syslog_mock.config_mgr.enable_syslog.return_value = message
+ idrac_connection_setup_syslog_mock.config_mgr.is_change_applicable.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ msg = self.module.run_setup_idrac_syslog(idrac_connection_setup_syslog_mock, f_module)
+ assert msg == "Enabled"
+
+ def test_run_setup_idrac_syslog_failed_case01(self, idrac_connection_setup_syslog_mock, idrac_default_args,
+ idrac_file_manager_mock):
+ idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
+ "syslog": "Enable", "share_password": "sharepassword"})
+ message = {'Status': 'Failed', "Data": {'Message': 'status failed in checking Data'}}
+ idrac_connection_setup_syslog_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
+ idrac_connection_setup_syslog_mock.config_mgr.set_liason_share.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
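+ # In check mode the module returns config_mgr.is_change_applicable(); calling the mock again yields the
+ # identical return value for the comparison below.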
+ result = self.module.run_setup_idrac_syslog(idrac_connection_setup_syslog_mock, f_module)
+ assert result == idrac_connection_setup_syslog_mock.config_mgr.is_change_applicable()
+
+ def test_run_setup_idrac_syslog_failed_case03(self, idrac_connection_setup_syslog_mock, idrac_default_args,
+ idrac_file_manager_mock):
+ idrac_default_args.update(
+ {"share_name": "dummy_share_name", "share_mnt": "mountname", "share_user": "shareuser",
+ "syslog": "Disabled", "share_password": "sharepassword"})
+ message = {"message": "No changes were applied", "changed": False,
+ "Status": "failed"}
+ idrac_connection_setup_syslog_mock.config_mgr.enable_syslog.return_value = message
+ idrac_connection_setup_syslog_mock.config_mgr.disable_syslog.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_setup_idrac_syslog(idrac_connection_setup_syslog_mock, f_module)
+ assert msg == {'Status': 'failed', 'changed': False, 'message': 'No changes were applied'}
+
+ @pytest.mark.parametrize("exc_type", [SSLValidationError, URLError, ValueError, TypeError,
+ ConnectionError, HTTPError])
+ def test_main_setup_syslog_exception_handling_case(self, exc_type, mocker, idrac_connection_setup_syslog_mock,
+ idrac_default_args, idrac_file_manager_mock):
+ idrac_default_args.update({"share_name": "sharename", 'share_password': None,
+ "syslog": "Enabled", 'share_mnt': None, 'share_user': None})
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH +
+ 'idrac_syslog.run_setup_idrac_syslog',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH +
+ 'idrac_syslog.run_setup_idrac_syslog',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if not exc_type == URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_system_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_system_info.py
new file mode 100644
index 00000000..dbbb130e
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_system_info.py
@@ -0,0 +1,78 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_system_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, Mock
+from pytest import importorskip
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestSystemInventory(FakeAnsibleModule):
+ module = idrac_system_info
+
+ @pytest.fixture
+ def idrac_system_info_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.get_entityjson = idrac_obj
+ type(idrac_obj).get_json_device = Mock(return_value="msg")
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_system_info_connection_mock(self, mocker, idrac_system_info_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH +
+ 'idrac_system_info.iDRACConnection',
+ return_value=idrac_system_info_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_system_info_mock
+ return idrac_system_info_mock
+
+ def test_idrac_system_info_main_success_case01(self, idrac_system_info_mock, idrac_system_info_connection_mock,
+ idrac_default_args):
+ idrac_system_info_mock.get_entityjson.return_value = None
+ idrac_system_info_connection_mock.get_json_device.return_value = {"status": "Success"}
+ result = self._run_module(idrac_default_args)
+ assert result == {"system_info": {"status": "Success"},
+ "msg": "Successfully fetched the system inventory details.",
+ "changed": False}
+
+ @pytest.mark.parametrize("exc_type", [SSLValidationError, URLError, ValueError, TypeError,
+ ConnectionError, HTTPError])
+ def test_idrac_system_info_main_exception_handling_case(self, exc_type, idrac_system_info_connection_mock,
+ idrac_default_args):
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ idrac_system_info_connection_mock.get_json_device.side_effect = exc_type('test')
+ else:
+ idrac_system_info_connection_mock.get_json_device.side_effect = exc_type(
+ 'http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str))
+ if not exc_type == URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_timezone_ntp.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_timezone_ntp.py
new file mode 100644
index 00000000..ee1d9d2e
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_timezone_ntp.py
@@ -0,0 +1,228 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 6.0.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_timezone_ntp
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock, PropertyMock
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from pytest import importorskip
+
+importorskip("omsdk.sdkfile")
+importorskip("omsdk.sdkcreds")
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestConfigTimezone(FakeAnsibleModule):
+ module = idrac_timezone_ntp
+
+ @pytest.fixture
+ def idrac_configure_timezone_mock(self, mocker):
+ omsdk_mock = MagicMock()
+ idrac_obj = MagicMock()
+ omsdk_mock.file_share_manager = idrac_obj
+ omsdk_mock.config_mgr = idrac_obj
+ type(idrac_obj).create_share_obj = Mock(return_value="servicesstatus")
+ type(idrac_obj).set_liason_share = Mock(return_value="servicestatus")
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_file_manager_config_timesone_mock(self, mocker):
+ try:
+ file_manager_obj = mocker.patch(
+ MODULE_PATH + 'idrac_timezone_ntp.file_share_manager')
+ except AttributeError:
+ file_manager_obj = MagicMock()
+ obj = MagicMock()
+ file_manager_obj.create_share_obj.return_value = obj
+ return file_manager_obj
+
+ @pytest.fixture
+ def idrac_connection_configure_timezone_mock(self, mocker, idrac_configure_timezone_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH +
+ 'idrac_timezone_ntp.iDRACConnection',
+ return_value=idrac_configure_timezone_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_configure_timezone_mock
+ return idrac_configure_timezone_mock
+
+ def test_main_idrac_timezone_config_success_Case(self, idrac_connection_configure_timezone_mock, idrac_default_args,
+ mocker, idrac_file_manager_config_timesone_mock):
+ idrac_default_args.update({"share_name": None})
+ message = {'changed': False, 'msg': {'Status': "Success", "Message": "No changes found to commit!"}}
+ mocker.patch(MODULE_PATH +
+ 'idrac_timezone_ntp.run_idrac_timezone_config', return_value=(message, False))
+ result = self._run_module(idrac_default_args)
+ assert result == {'msg': 'Successfully configured the iDRAC time settings.',
+ 'timezone_ntp_status': ({'changed': False,
+ 'msg': {'Status': 'Success',
+ 'Message': 'No changes found to commit!'}}, False),
+ 'changed': False}
+ status_msg = {"Status": "Success", "Message": "No changes found to commit!",
+ "msg": {"Status": "Success", "Message": "No changes found to commit!"}}
+ mocker.patch(MODULE_PATH +
+ 'idrac_timezone_ntp.run_idrac_timezone_config', return_value=status_msg)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Successfully configured the iDRAC time settings."
+
+ def test_run_idrac_timezone_config_success_case01(self, idrac_connection_configure_timezone_mock,
+ idrac_default_args, idrac_file_manager_config_timesone_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "setup_idrac_timezone": "setuptimezone",
+ "enable_ntp": "Enabled", "ntp_server_1": "ntp server1",
+ "ntp_server_2": "ntp server2", "ntp_server_3": "ntp server3"})
+ message = {"changes_applicable": True, "message": "changes are applicable"}
+ idrac_connection_configure_timezone_mock.config_mgr.is_change_applicable.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ msg = self.module.run_idrac_timezone_config(idrac_connection_configure_timezone_mock, f_module)
+ assert msg == {'changes_applicable': True, 'message': 'changes are applicable'}
+
+ def test_run_idrac_timezone_config_success_case02(self, idrac_connection_configure_timezone_mock,
+ idrac_default_args, idrac_file_manager_config_timesone_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "setup_idrac_timezone": "setuptimezone",
+ "enable_ntp": "Enabled", "ntp_server_1": "ntp server1",
+ "ntp_server_2": "ntp server2", "ntp_server_3": "ntp server3"})
+ message = {"changes_applicable": True, "message": "changes found to commit!", "changed": True,
+ "Status": "Success"}
+ idrac_connection_configure_timezone_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_timezone_config(idrac_connection_configure_timezone_mock, f_module)
+ assert msg == {'Status': 'Success',
+ 'changed': True,
+ 'changes_applicable': True,
+ 'message': 'changes found to commit!'}
+
+ def test_run_idrac_timezone_config_success_case03(self, idrac_connection_configure_timezone_mock,
+ idrac_default_args, idrac_file_manager_config_timesone_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "setup_idrac_timezone": "setuptimezone",
+ "enable_ntp": "Enabled", "ntp_server_1": "ntp server1",
+ "ntp_server_2": "ntp server2", "ntp_server_3": "ntp server3"})
+ message = {"changes_applicable": False, "Message": "No changes found to commit!", "changed": False,
+ "Status": "Success"}
+ idrac_connection_configure_timezone_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_timezone_config(idrac_connection_configure_timezone_mock, f_module)
+ assert msg == {'Message': 'No changes found to commit!',
+ 'Status': 'Success',
+ 'changed': False,
+ 'changes_applicable': False}
+
+ def test_run_idrac_timezone_config_success_case04(self, idrac_connection_configure_timezone_mock,
+ idrac_default_args, idrac_file_manager_config_timesone_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "setup_idrac_timezone": "setuptimezone",
+ "enable_ntp": "Enabled", "ntp_server_1": "ntp server1",
+ "ntp_server_2": "ntp server2", "ntp_server_3": "ntp server3"})
+ message = {"changes_applicable": False, "Message": "No changes found to commit!", "changed": False,
+ "Status": "Success"}
+ idrac_connection_configure_timezone_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_timezone_config(idrac_connection_configure_timezone_mock, f_module)
+ assert msg == {'Message': 'No changes found to commit!',
+ 'Status': 'Success',
+ 'changed': False,
+ 'changes_applicable': False}
+
+ def test_run_idrac_timezone_config_success_case05(self, idrac_connection_configure_timezone_mock,
+ idrac_default_args, idrac_file_manager_config_timesone_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "setup_idrac_timezone": None,
+ "enable_ntp": None, "ntp_server_1": None, "ntp_server_2": None,
+ "ntp_server_3": None})
+ message = {"changes_applicable": False, "Message": "No changes found to commit!", "changed": False,
+ "Status": "Success"}
+ idrac_connection_configure_timezone_mock.config_mgr.configure_timezone.return_value = message
+ idrac_connection_configure_timezone_mock.config_mgr.configure_ntp.return_value = message
+ idrac_connection_configure_timezone_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_timezone_config(idrac_connection_configure_timezone_mock, f_module)
+ assert msg == {'Message': 'No changes found to commit!',
+ 'Status': 'Success',
+ 'changed': False,
+ 'changes_applicable': False}
+
+ def test_run_idrac_timezone_config_failed_case01(self, idrac_connection_configure_timezone_mock,
+ idrac_default_args, idrac_file_manager_config_timesone_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "setup_idrac_timezone": "setuptimezone",
+ "enable_ntp": "Enabled", "ntp_server_1": "ntp server1",
+ "ntp_server_2": "ntp server2", "ntp_server_3": "ntp server3"})
+ message = {'Status': 'Failed', "Data": {'Message': 'status failed in checking Data'}}
+ idrac_connection_configure_timezone_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
+ idrac_connection_configure_timezone_mock.config_mgr.set_liason_share.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ result = self.module.run_idrac_timezone_config(idrac_connection_configure_timezone_mock, f_module)
+ assert result == idrac_connection_configure_timezone_mock.config_mgr.is_change_applicable()
+
+ def test_run_idrac_timezone_config_failed_case02(self, idrac_connection_configure_timezone_mock,
+ idrac_default_args, idrac_file_manager_config_timesone_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "setup_idrac_timezone": "setuptimezone",
+ "enable_ntp": "Enabled", "ntp_server_1": "ntp server1",
+ "ntp_server_2": "ntp server2", "ntp_server_3": "ntp server3"})
+ message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
+ "Status": "failed"}
+ idrac_connection_configure_timezone_mock.config_mgr.apply_changes.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ msg = self.module.run_idrac_timezone_config(idrac_connection_configure_timezone_mock, f_module)
+ assert msg == {'Message': 'No changes were applied',
+ 'Status': 'failed',
+ 'changed': False,
+ 'changes_applicable': False}
+
+ def test_run_idrac_timezone_config_failed_case03(self, idrac_connection_configure_timezone_mock,
+ idrac_default_args, idrac_file_manager_config_timesone_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "setup_idrac_timezone": "setuptimezone",
+ "enable_ntp": "Enabled", "ntp_server_1": "ntp server1",
+ "ntp_server_2": "ntp server2", "ntp_server_3": "ntp server3"})
+ message = {'Status': 'Failed', "Data": {'Message': "Failed to found changes"}}
+ idrac_connection_configure_timezone_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
+ idrac_connection_configure_timezone_mock.config_mgr.set_liason_share.return_value = message
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ msg = self.module.run_idrac_timezone_config(idrac_connection_configure_timezone_mock, f_module)
+ assert msg == idrac_connection_configure_timezone_mock.config_mgr.is_change_applicable()
+
+ @pytest.mark.parametrize("exc_type", [RuntimeError, SSLValidationError, ConnectionError, KeyError,
+ ImportError, ValueError, TypeError, HTTPError, URLError])
+ def test_main_idrac_configure_timezone_exception_handling_case(self, exc_type, mocker, idrac_default_args,
+ idrac_connection_configure_timezone_mock,
+ idrac_file_manager_config_timesone_mock):
+ idrac_default_args.update({"share_name": None})
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(
+ MODULE_PATH + 'idrac_timezone_ntp.run_idrac_timezone_config',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(
+ MODULE_PATH + 'idrac_timezone_ntp.run_idrac_timezone_config',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if not exc_type == URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py
new file mode 100644
index 00000000..2fa528d0
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py
@@ -0,0 +1,350 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_user
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock
+from ansible.module_utils._text import to_text
+from io import StringIO
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestIDRACUser(FakeAnsibleModule):
+ module = idrac_user
+
+ @pytest.fixture
+ def idrac_user_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
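+    # The fixture below patches iDRACRedfishAPI, which the module uses as a context
+    # manager, so every test in this class talks to a MagicMock instead of a live iDRAC.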
+ @pytest.fixture
+ def idrac_connection_user_mock(self, mocker, idrac_user_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'idrac_user.iDRACRedfishAPI',
+ return_value=idrac_user_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_user_mock
+ return idrac_conn_mock
+
+ def test_get_payload(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ resp = self.module.get_payload(f_module, 1, action="update")
+ assert resp["Users.1.UserName"] == idrac_default_args["new_user_name"]
+
+ def test_convert_payload_xml(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ payload = {"Users.1.UserName": idrac_default_args["user_name"],
+ "Users.1.Password": idrac_default_args["user_password"],
+ "Users.1.Enable": idrac_default_args["enable"],
+ "Users.1.Privilege": idrac_default_args["privilege"],
+ "Users.1.IpmiLanPrivilege": idrac_default_args["ipmi_lan_privilege"],
+ "Users.1.IpmiSerialPrivilege": idrac_default_args["ipmi_serial_privilege"],
+ "Users.1.SolEnable": idrac_default_args["sol_enable"],
+ "Users.1.ProtocolEnable": idrac_default_args["protocol_enable"],
+ "Users.1.AuthenticationProtocol": idrac_default_args["authentication_protocol"],
+ "Users.1.PrivacyProtocol": idrac_default_args["privacy_protocol"]}
+ xml_payload, json_payload = self.module.convert_payload_xml(payload)
+ assert json_payload["Users.1#SolEnable"] is True
+
+ def test_remove_user_account_check_mode_1(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "absent", "user_name": "user_name", "new_user_name": None,
+ "user_password": None, "privilege": None, "ipmi_lan_privilege": None,
+ "ipmi_serial_privilege": None, "enable": False, "sol_enable": False,
+ "protocol_enable": False, "authentication_protocol": "SHA",
+ "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ slot_id = 1
+ slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
+ with pytest.raises(Exception) as exc:
+ self.module.remove_user_account(f_module, idrac_connection_user_mock, slot_uri, slot_id)
+ assert exc.value.args[0] == "Changes found to commit!"
+
+ def test_remove_user_account_check_mode_2(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "absent", "user_name": "user_name", "new_user_name": None,
+ "user_password": None, "privilege": None, "ipmi_lan_privilege": None,
+ "ipmi_serial_privilege": None, "enable": False, "sol_enable": False,
+ "protocol_enable": False, "authentication_protocol": "SHA",
+ "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ with pytest.raises(Exception) as exc:
+ self.module.remove_user_account(f_module, idrac_connection_user_mock, None, None)
+ assert exc.value.args[0] == "No changes found to commit!"
+
+ def test_remove_user_account_check_mode_3(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "absent", "user_name": "user_name", "new_user_name": None,
+ "user_password": None, "privilege": None, "ipmi_lan_privilege": None,
+ "ipmi_serial_privilege": None, "enable": False, "sol_enable": False,
+ "protocol_enable": False, "authentication_protocol": "SHA",
+ "privacy_protocol": "AES"})
+ idrac_connection_user_mock.remove_user_account.return_value = {"success": True}
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ slot_id = 1
+ slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
+ mocker.patch(MODULE_PATH + 'idrac_user.time.sleep', return_value=None)
+ self.module.remove_user_account(f_module, idrac_connection_user_mock, slot_uri, slot_id)
+
+ def test_remove_user_account_check_mode_4(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "absent", "user_name": "user_name", "new_user_name": None,
+ "user_password": None, "privilege": None, "ipmi_lan_privilege": None,
+ "ipmi_serial_privilege": None, "enable": False, "sol_enable": False,
+ "protocol_enable": False, "authentication_protocol": "SHA",
+ "privacy_protocol": "AES"})
+ idrac_connection_user_mock.remove_user_account.return_value = {"success": True}
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ with pytest.raises(Exception) as exc:
+ self.module.remove_user_account(f_module, idrac_connection_user_mock, None, None)
+ assert exc.value.args[0] == 'The user account is absent.'
+
+ def test_get_user_account_1(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.export_scp",
+ return_value=MagicMock())
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.get_idrac_local_account_attr",
+ return_value={"Users.2#UserName": "test_user", "Users.3#UserName": ""})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ response = self.module.get_user_account(f_module, idrac_connection_user_mock)
+ assert response[0]["Users.2#UserName"] == "test_user"
+ assert response[3] == 3
+
+ def test_get_user_account_2(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.export_scp",
+ return_value=MagicMock())
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.get_idrac_local_account_attr",
+ return_value={"Users.2#UserName": "test_user", "Users.3#UserName": ""})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ response = self.module.get_user_account(f_module, idrac_connection_user_mock)
+ assert response[3] == 3
+ assert response[4] == "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/3"
+
+ def test_create_or_modify_account_1(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idrac_connection_user_mock.get_server_generation = (13, "2.70.70.70")
+ mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
+ mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
+ return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.import_scp",
+ return_value={"Message": "Successfully created a request."})
+ empty_slot_id = 2
+ empty_slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(empty_slot_id)
+ user_attr = {"User.2#UserName": "test_user"}
+ mocker.patch(MODULE_PATH + 'idrac_user.time.sleep', return_value=None)
+ response = self.module.create_or_modify_account(f_module, idrac_connection_user_mock, None, None,
+ empty_slot_id, empty_slot_uri, user_attr)
+ assert response[1] == "Successfully created user account."
+
+ def test_create_or_modify_account_2(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idrac_connection_user_mock.get_server_generation = (13, "2.70.70.70")
+ mocker.patch(MODULE_PATH + 'idrac_user.time.sleep', return_value=None)
+ mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
+ mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
+ return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.import_scp",
+ return_value={"Message": "Successfully created a request."})
+ slot_id = 2
+ slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
+ user_attr = {"User.2#UserName": "test_user"}
+ response = self.module.create_or_modify_account(f_module, idrac_connection_user_mock, slot_uri, slot_id,
+ None, None, user_attr)
+ assert response[1] == "Successfully updated user account."
+
+ def test_create_or_modify_account_3(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idrac_connection_user_mock.get_server_generation = (13, "2.70.70.70")
+ mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
+ mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
+ return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.import_scp",
+ return_value={"Message": "Successfully created a request."})
+ slot_id = 2
+ slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
+ user_attr = {"Users.1#UserName": "test_user"}
+ with pytest.raises(Exception) as exc:
+ self.module.create_or_modify_account(f_module, idrac_connection_user_mock, slot_uri, slot_id,
+ None, None, user_attr)
+ assert exc.value.args[0] == "Requested changes are already present in the user slot."
+
+ def test_create_or_modify_account_4(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ idrac_connection_user_mock.get_server_generation = (13, "2.70.70.70")
+ mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
+ mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
+ return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.import_scp",
+ return_value={"Message": "Successfully created a request."})
+ slot_id = 2
+ slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
+ user_attr = {"Users.1#UserName": "test_user"}
+ with pytest.raises(Exception) as exc:
+ self.module.create_or_modify_account(f_module, idrac_connection_user_mock, slot_uri, slot_id,
+ None, None, user_attr)
+ assert exc.value.args[0] == "No changes found to commit!"
+
+ def test_create_or_modify_account_5(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ idrac_connection_user_mock.get_server_generation = (13, "2.70.70.70")
+ mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
+ mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
+ return_value=("<xml-data>", {"Users.2#UserName": "test_user"}))
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.import_scp",
+ return_value={"Message": "Successfully created a request."})
+ slot_id = 2
+ slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
+ user_attr = {"Users.1#UserName": "test_user"}
+ with pytest.raises(Exception) as exc:
+ self.module.create_or_modify_account(f_module, idrac_connection_user_mock, slot_uri, slot_id,
+ None, None, user_attr)
+ assert exc.value.args[0] == "Changes found to commit!"
+
+ def test_create_or_modify_account_6(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idrac_connection_user_mock.get_server_generation = (14, "3.60.60.60")
+ mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
+ mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
+ return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.invoke_request",
+ return_value={"Message": "Successfully created a request."})
+ slot_id = 2
+ slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
+ user_attr = {"User.2#UserName": "test_user"}
+ response = self.module.create_or_modify_account(f_module, idrac_connection_user_mock, None, None,
+ slot_id, slot_uri, user_attr)
+ assert response[1] == "Successfully created user account."
+
+ def test_create_or_modify_account_7(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ idrac_connection_user_mock.get_server_generation = (14, "3.60.60.60")
+ mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
+ mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
+ return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.invoke_request",
+ return_value={"Message": "Successfully created a request."})
+ slot_id = 2
+ slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
+ user_attr = {"User.2#UserName": "test_user"}
+ with pytest.raises(Exception) as exc:
+ self.module.create_or_modify_account(f_module, idrac_connection_user_mock, None, None,
+ slot_id, slot_uri, user_attr)
+ assert exc.value.args[0] == "Changes found to commit!"
+
+ def test_create_or_modify_account_8(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idrac_connection_user_mock.get_server_generation = (14, "3.60.60.60")
+ mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
+ mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
+ return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.invoke_request",
+ return_value={"Message": "Successfully created a request."})
+ slot_id = 2
+ slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
+ user_attr = {"User.2#UserName": "test_user"}
+ response = self.module.create_or_modify_account(f_module, idrac_connection_user_mock, slot_uri, slot_id,
+ None, None, user_attr)
+ assert response[1] == "Successfully updated user account."
+
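+    # HTTPError and SSLValidationError are raised with the full (url, code, msg, hdrs, fp)
+    # argument set, including a JSON body, while the other exceptions carry a plain message;
+    # only URLError is expected to make the module exit without reporting a failure.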
+ @pytest.mark.parametrize("exc_type", [SSLValidationError, URLError, ValueError, TypeError,
+ ConnectionError, HTTPError, ImportError, RuntimeError])
+ def test_main(self, exc_type, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + "idrac_user.create_or_modify_account",
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH + "idrac_user.create_or_modify_account",
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+        if exc_type != URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_virtual_media.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_virtual_media.py
new file mode 100644
index 00000000..94e620f3
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_virtual_media.py
@@ -0,0 +1,251 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 6.3.0
+# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_virtual_media
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock
+from mock import PropertyMock
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
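+# The fixture below patches iDRACRedfishAPI so that invoke_request returns the shared
+# redfish_response_mock; individual tests then shape redfish_response_mock.json_data as needed.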
+@pytest.fixture
+def virtual_media_conn_mock(mocker, redfish_response_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'idrac_virtual_media.iDRACRedfishAPI')
+ idrac_conn_mock_obj = idrac_conn_mock.return_value.__enter__.return_value
+ idrac_conn_mock_obj.invoke_request.return_value = redfish_response_mock
+ return idrac_conn_mock_obj
+
+
+class TestVirtualMedia(FakeAnsibleModule):
+
+ module = idrac_virtual_media
+
+ def test_validate_params(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args):
+ idrac_default_args.update(
+ {"virtual_media": [{"index": 1, "insert": True, "image": "//192.168.0.1/path/image.iso"}]})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as err:
+ self.module._validate_params(f_module, {"index": 1, "insert": True,
+ "image": "//192.168.0.1/path/image.iso"}, "140")
+ assert err.value.args[0] == "CIFS share required username and password."
+ idrac_default_args.update({"virtual_media": [{"index": 1, "insert": True, "username": "user", "password": "pwd",
+ "image": "\\\\192.168.0.1\\path\\image.iso"}]})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result = self.module._validate_params(f_module, {"password": "pwd", "insert": True, "username": "usr",
+ "image": "\\\\192.168.0.1\\path\\image.iso", "index": 1},
+ "141")
+ assert result is None
+
+ def test_get_virtual_media_info(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args):
+ redfish_response_mock.json_data = {
+ "RedfishVersion": "1.13.1",
+ "VirtualMedia": {"@odata.id": "/redfish/v1/Systems/System.Embedded.1/VirtualMedia"},
+ "Members": [{"Inserted": False, "Image": None},
+ {"Inserted": True, "Image": "//192.168.0.1/file_path/file.iso"}]
+ }
+ resp, vr_id, rd_version = self.module.get_virtual_media_info(virtual_media_conn_mock)
+ assert vr_id == "system"
+ redfish_response_mock.json_data.update({"RedfishVersion": "1.11.1"})
+ resp, vr_id, rd_version = self.module.get_virtual_media_info(virtual_media_conn_mock)
+ assert vr_id == "manager"
+
+ def test_get_payload_data(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args):
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso"}]})
+ each = {"insert": True, "image": "//192.168.0.1/path/file.iso", "index": 1, "media_type": "CD"}
+ vr_member = [{"Inserted": True, "Image": "//192.168.0.1/path/image_file.iso",
+ "UserName": "username", "Password": "password", "Id": "CD", "MediaTypes": ["CD", "DVD"]}]
+ is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "manager")
+ assert is_change is True
+ assert input_vr_mem == {'Inserted': True, 'Image': '//192.168.0.1/path/file.iso'}
+ assert vr_mem == {'Inserted': True, 'Image': '//192.168.0.1/path/image_file.iso', 'UserName': 'username',
+ 'Password': 'password', 'Id': 'CD', 'MediaTypes': ['CD', 'DVD']}
+ each.update({"username": "user_name", "password": "password", "domain": "domain",
+ "image": "192.168.0.3:/file_path/image.iso"})
+ is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "manager")
+ assert is_change is True
+ each.update({"media_type": "USBStick"})
+ is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "manager")
+ assert unsup_media == 1
+ each = {"insert": False, "index": 1}
+ is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "manager")
+ assert is_change is True
+ is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "system")
+ assert is_change is True
+ each.update({"username": "user_name", "password": "password", "domain": "domain", "media_type": "CD",
+ "image": "192.168.0.3:/file_path/image.img", "insert": True})
+ is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "manager")
+ assert unsup_media == 1
+ each.update({"username": "user_name", "password": "password", "domain": "domain", "media_type": "DVD",
+ "image": "192.168.0.3:/file_path/image.img", "insert": True})
+ is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "manager")
+ assert unsup_media == 1
+
+ def test_domain_name(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args):
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso"}]})
+ each = {"insert": True, "image": "//192.168.0.1/path/file.iso", "index": 1, "media_type": "CD",
+ "domain": "domain", "username": "user", "password": "pwd"}
+ vr_member = [{"Inserted": True, "Image": "//192.168.0.1/path/image_file.iso", "domain": "domain",
+ "UserName": "username", "Password": "password", "Id": "CD", "MediaTypes": ["CD", "DVD"]}]
+ is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "manager")
+ assert is_change is True
+
+ def test_virtual_media_operation(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso"}],
+ "force": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + 'idrac_virtual_media.time.sleep', return_value=None)
+ payload = [{
+ "vr_mem": {"Inserted": True, "Actions": {
+ "#VirtualMedia.EjectMedia": {
+ "target": "/redfish/v1/Systems/System.Embedded.1/VirtualMedia/1/Actions/VirtualMedia.EjectMedia"},
+ "#VirtualMedia.InsertMedia": {
+ "target": "/redfish/v1/Systems/System.Embedded.1/VirtualMedia/1/Actions/VirtualMedia.InsertMedia"}
+ }},
+ "payload": {"Inserted": True, "Image": "http://192.168.0.1/file_path/file.iso"},
+ "input": {"index": 1, "insert": True, "image": "//192.168.0.1/path/file.iso", "force": True}
+ }]
+ result = self.module.virtual_media_operation(virtual_media_conn_mock, f_module, payload, "manager")
+ assert result == []
+ idrac_default_args.update({"force": False})
+ result = self.module.virtual_media_operation(virtual_media_conn_mock, f_module, payload, "manager")
+ assert result == []
+ payload[0]["vr_mem"].update({"Inserted": False})
+ result = self.module.virtual_media_operation(virtual_media_conn_mock, f_module, payload, "manager")
+ assert result == []
+ payload[0]["vr_mem"].update({"Inserted": True})
+ payload[0]["payload"].update({"Inserted": False})
+ result = self.module.virtual_media_operation(virtual_media_conn_mock, f_module, payload, "manager")
+ assert result == []
+
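+    # The patched time.sleep raises HTTPError here to stand in for a failed insert/eject
+    # request; the Redfish extended-info payload (MessageId VRM0012) is expected to be
+    # collected and returned rather than re-raised.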
+ @pytest.mark.parametrize("exc_type", [HTTPError])
+ def test_virtual_media_operation_http(self, virtual_media_conn_mock, redfish_response_mock,
+ idrac_default_args, mocker, exc_type):
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso"}],
+ "force": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + 'idrac_virtual_media.time.sleep', return_value=None)
+ payload = [{
+ "vr_mem": {"Inserted": True, "Actions": {
+ "#VirtualMedia.EjectMedia": {
+ "target": "/redfish/v1/Systems/System.Embedded.1/VirtualMedia/CD/Actions/VirtualMedia.EjectMedia"},
+ "#VirtualMedia.InsertMedia": {
+ "target": "/redfish/v1/Systems/System.Embedded.1/VirtualMedia/CD/Actions/VirtualMedia.InsertMedia"}
+ }},
+ "payload": {"Inserted": True, "Image": "http://192.168.0.1/file_path/file.iso"},
+ "input": {"index": 1, "insert": True, "image": "//192.168.0.1/path/file.iso", "force": True}
+ }]
+ if exc_type == HTTPError:
+ mocker.patch(MODULE_PATH + 'idrac_virtual_media.json.load', return_value={
+ "error": {"@Message.ExtendedInfo": [{"MessageId": "VRM0012"}]}
+ })
+ json_str = to_text(json.dumps({"data": "out"}))
+ mocker.patch(
+ MODULE_PATH + 'idrac_virtual_media.time.sleep',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self.module.virtual_media_operation(virtual_media_conn_mock, f_module, payload, "system")
+ assert result == [{'@Message.ExtendedInfo': [{'MessageId': 'VRM0012'}]}]
+
+ def test_virtual_media(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args, mocker):
+ vr_member = [{"Inserted": True, "Image": "//192.168.0.1/path/image_file.iso",
+ "UserName": "username", "Password": "password", "Id": "CD", "MediaTypes": ["CD", "DVD"]}]
+ mocker.patch(MODULE_PATH + 'idrac_virtual_media.virtual_media_operation', return_value=[])
+ mocker.patch(MODULE_PATH + 'idrac_virtual_media._validate_params', return_value=None)
+ mocker.patch(MODULE_PATH + 'idrac_virtual_media.get_payload_data', return_value=(True, {}, {}, 1))
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso"}],
+ "force": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.virtual_media(virtual_media_conn_mock, f_module, vr_member, "manager", "141")
+ assert ex.value.args[0] == "Unable to complete the virtual media operation because unsupported " \
+ "media type provided for index 1"
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.img"}],
+ "force": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.virtual_media(virtual_media_conn_mock, f_module, vr_member, "manager", "141")
+ assert ex.value.args[0] == "Unable to complete the virtual media operation because " \
+ "unsupported media type provided for index 1"
+ with pytest.raises(Exception) as ex:
+ self.module.virtual_media(virtual_media_conn_mock, f_module, vr_member, "system", "141")
+ assert ex.value.args[0] == "Unable to complete the virtual media operation because " \
+ "unsupported media type provided for index 1"
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso",
+ "index": 1, "media_type": "CD"}], "force": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + 'idrac_virtual_media.get_payload_data', return_value=(True, {}, {}, None))
+ result = self.module.virtual_media(virtual_media_conn_mock, f_module, vr_member, "manager", "141")
+ assert result == []
+ result = self.module.virtual_media(virtual_media_conn_mock, f_module, vr_member, "system", "141")
+ assert result == []
+ f_module.check_mode = True
+ mocker.patch(MODULE_PATH + 'idrac_virtual_media.get_payload_data', return_value=(True, {"Insert": True},
+ {}, None))
+ with pytest.raises(Exception) as ex:
+ self.module.virtual_media(virtual_media_conn_mock, f_module, vr_member, "manager", "141")
+ assert ex.value.args[0] == "Changes found to be applied."
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso",
+ "index": 1, "media_type": "CD"}], "force": False})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = True
+ mocker.patch(MODULE_PATH + 'idrac_virtual_media.get_payload_data', return_value=(False, {}, {}, None))
+ with pytest.raises(Exception) as ex:
+ self.module.virtual_media(virtual_media_conn_mock, f_module, vr_member, "manager", "141")
+ assert ex.value.args[0] == "No changes found to be applied."
+
+ def test_main_success(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"virtual_media": [
+ {"insert": True, "image": "http://192.168.0.1/path/file.iso"},
+ {"insert": True, "image": "192.168.0.2:/file/file.iso"}], "force": True})
+ mocker.patch(MODULE_PATH + 'idrac_virtual_media.get_virtual_media_info',
+ return_value=([{"Insert": True}, {"Insert": True}], "manager", "141"))
+ with pytest.raises(Exception) as ex:
+ self._run_module(idrac_default_args)
+ assert ex.value.args[0]["msg"] == "Unable to complete the operation because the virtual media settings " \
+ "provided exceeded the maximum limit."
+ mocker.patch(MODULE_PATH + 'idrac_virtual_media.virtual_media', return_value=[])
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": "http://192.168.0.1/path/file.iso"}],
+ "force": True})
+ result = self._run_module(idrac_default_args)
+ assert result == {'changed': True, 'msg': 'Successfully performed the virtual media operation.'}
+ mocker.patch(MODULE_PATH + 'idrac_virtual_media.virtual_media', return_value=["error"])
+ with pytest.raises(Exception) as ex:
+ self._run_module(idrac_default_args)
+ assert ex.value.args[0]["msg"] == "Unable to complete the virtual media operation."
+
+    @pytest.mark.parametrize("exc_type", [HTTPError, URLError, ValueError, RuntimeError, SSLValidationError,
+                                          ConnectionError, KeyError, ImportError, TypeError])
+ def test_main_exception(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args, mocker, exc_type):
+ idrac_default_args.update({"virtual_media": [{"index": 1, "insert": False}]})
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError]:
+ mocker.patch(MODULE_PATH + 'idrac_virtual_media.get_virtual_media_info', side_effect=exc_type('test'))
+ else:
+ mocker.patch(
+ MODULE_PATH + 'idrac_virtual_media.get_virtual_media_info',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+        if exc_type != URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_active_directory.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_active_directory.py
new file mode 100644
index 00000000..1722a3da
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_active_directory.py
@@ -0,0 +1,250 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 4.0.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+from ssl import SSLError
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_active_directory
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
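+# The URI fragments and user-facing messages below are expected to match those produced by the
+# ome_active_directory module, so the parametrized cases can assert on result['msg'] verbatim.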
+AD_URI = "AccountService/ExternalAccountProvider/ADAccountProvider"
+TEST_CONNECTION = "AccountService/ExternalAccountProvider/Actions/ExternalAccountProvider.TestADConnection"
+NO_CHANGES_MSG = "No changes found to be applied."
+CHANGES_FOUND = "Changes found to be applied."
+MAX_AD_MSG = "Unable to add the account provider because the maximum number of configurations allowed for an" \
+ " Active Directory service is {0}."
+CREATE_SUCCESS = "Successfully added the Active Directory service."
+MODIFY_SUCCESS = "Successfully modified the Active Directory service."
+DELETE_SUCCESS = "Successfully deleted the Active Directory service."
+DOM_SERVER_MSG = "Specify the domain server. Domain server is required to create an Active Directory service."
+GRP_DOM_MSG = "Specify the group domain. Group domain is required to create an Active Directory service."
+CERT_INVALID = "The provided certificate file path is invalid or not readable."
+DOMAIN_ALLOWED_COUNT = "Maximum entries allowed for {0} lookup type is {1}."
+TEST_CONNECTION_SUCCESS = "Test Connection is successful. "
+TEST_CONNECTION_FAIL = "Test Connection has failed. "
+ERR_READ_FAIL = "Unable to retrieve the error details."
+INVALID_ID = "The provided Active Directory ID is invalid."
+TIMEOUT_RANGE = "The {0} value is not in the range of {1} to {2}."
+MAX_AD = 2
+MIN_TIMEOUT = 15
+MAX_TIMEOUT = 300
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_active_directory.'
+MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.ome.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_ad(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeAD(FakeAnsibleModule):
+ module = ome_active_directory
+
+ @pytest.mark.parametrize("params", [
+ {"module_args": {"name": "domdev"}, "json_data": {"value": [{'Name': 'domdev', 'Id': 12}]},
+ "ad": {'Name': 'domdev', 'Id': 12}, "ad_cnt": 1},
+ {"module_args": {"id": 12}, "json_data": {"value": [{'Name': 'domdev', 'Id': 12}]},
+ "ad": {'Name': 'domdev', 'Id': 12}, "ad_cnt": 1},
+ {"module_args": {"id": 11}, "json_data": {"value": [
+ {'Name': 'domdev', 'Id': 12}, {'Name': 'domdev', 'Id': 13}]}, "ad": {}, "ad_cnt": 2}])
+ def test_get_ad(self, params, ome_connection_mock_for_ad, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ f_module = self.get_module_mock(params=params['module_args'])
+ ome_response_mock.json_data = params["json_data"]
+ ad, ad_cnt = self.module.get_ad(f_module, ome_connection_mock_for_ad)
+ assert ad == params['ad']
+ assert ad_cnt == params['ad_cnt']
+
+ @pytest.mark.parametrize("params", [{
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "domain.com", "name": "domdev"}, "msg": CREATE_SUCCESS}, {
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "domain.com", "name": "domdev"}, "msg": CHANGES_FOUND, "check_mode": True}, {
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "domain.com", "name": "domdev", "test_connection": True,
+ "domain_username": "user", "domain_password": "passwd"},
+ "msg": "{0}{1}".format(TEST_CONNECTION_SUCCESS, CREATE_SUCCESS)}
+ ])
+ def test_ome_active_directory_create_success(self, params, ome_connection_mock_for_ad, ome_response_mock,
+ ome_default_args, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = {"Name": "AD1"}
+ mocker.patch(MODULE_PATH + 'get_ad', return_value=params.get("get_ad", (None, 1)))
+ ome_default_args.update(params['module_args'])
+ result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['msg']
+
+ @pytest.mark.parametrize("params", [{
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "domain.com", "name": "domdev"},
+ "get_ad": ({"Name": "ad_test", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["192.168.20.181"],
+ "DnsServer": [], "GroupDomain": "dellemcdomain.com", "NetworkTimeOut": 120, "SearchTimeOut": 120,
+ "ServerPort": 3269, "CertificateValidation": False}, 1),
+ "msg": MODIFY_SUCCESS}, {
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "domain.com", "name": "domdev", "test_connection": True,
+ "domain_username": "user", "domain_password": "passwd"}, "get_ad":
+ ({"Name": "ad_test", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["192.168.20.181"], "DnsServer": [],
+ "GroupDomain": "dellemcdomain.com", "NetworkTimeOut": 120, "SearchTimeOut": 120, "ServerPort": 3269,
+ "CertificateValidation": False}, 1),
+ "msg": "{0}{1}".format(TEST_CONNECTION_SUCCESS, MODIFY_SUCCESS)},
+ {"module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "dellemcdomain.com", "name": "domdev"},
+ "get_ad": ({"Name": "domdev", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["192.96.20.181"],
+ "DnsServer": [], "GroupDomain": "dellemcdomain.com", "NetworkTimeOut": 120, "SearchTimeOut": 120,
+ "ServerPort": 3269, "CertificateValidation": False}, 1),
+ "msg": NO_CHANGES_MSG}, {
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "dellemcdomain.com", "name": "domdev"},
+ "get_ad": ({"Name": "domdev", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["192.168.20.181"],
+ "DnsServer": [], "GroupDomain": "dellemcdomain.com", "NetworkTimeOut": 120,
+ "SearchTimeOut": 120, "ServerPort": 3269, "CertificateValidation": False}, 1),
+ "msg": CHANGES_FOUND, "check_mode": True}
+ ])
+ def test_ome_active_directory_modify_success(self, params, ome_connection_mock_for_ad, ome_response_mock,
+ ome_default_args, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = {"Name": "AD1"}
+ ome_connection_mock_for_ad.strip_substr_dict.return_value = params.get("get_ad", (None, 1))[0]
+ mocker.patch(MODULE_PATH + 'get_ad', return_value=params.get("get_ad", (None, 1)))
+ ome_default_args.update(params['module_args'])
+ result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['msg']
+
+ @pytest.mark.parametrize("params", [{
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "domain.com", "name": "domdev", "state": "absent"},
+ "get_ad": ({"Name": "domdev", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["192.168.20.181"],
+ "DnsServer": [], "GroupDomain": "dellemcdomain.com", "NetworkTimeOut": 120, "SearchTimeOut": 120,
+ "ServerPort": 3269, "CertificateValidation": False}, 1),
+ "msg": DELETE_SUCCESS},
+ {"module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "dellemcdomain.com", "name": "domdev1", "state": "absent"},
+ "msg": NO_CHANGES_MSG}, {
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "dellemcdomain.com", "name": "domdev", "state": "absent"},
+ "get_ad": ({"Name": "domdev", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["192.168.20.181"],
+ "DnsServer": [], "GroupDomain": "dellemcdomain.com", "NetworkTimeOut": 120,
+ "SearchTimeOut": 120, "ServerPort": 3269, "CertificateValidation": False}, 1),
+ "msg": CHANGES_FOUND, "check_mode": True}
+ ])
+ def test_ome_active_directory_delete_success(self, params, ome_connection_mock_for_ad, ome_response_mock,
+ ome_default_args, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = {"Name": "AD1"}
+ ome_connection_mock_for_ad.strip_substr_dict.return_value = params.get("get_ad", (None, 1))[0]
+ mocker.patch(MODULE_PATH + 'get_ad', return_value=params.get("get_ad", (None, 1)))
+ ome_default_args.update(params['module_args'])
+ result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['msg']
+
+ @pytest.mark.parametrize("params", [
+ {"module_args": {"domain_controller_lookup": "MANUAL", "group_domain": "domain.com", "name": "domdev"},
+ "msg": DOM_SERVER_MSG}, {"module_args": {"domain_controller_lookup": "MANUAL",
+ "domain_server": ["192.96.20.181", "192.96.20.182", "192.96.20.183",
+ "192.96.20.184"], "group_domain": "domain.com",
+ "name": "domdev"}, "msg": DOMAIN_ALLOWED_COUNT.format("MANUAL", 3)},
+ {"module_args": {"domain_server": ["dom1.com1", "dom2.com"], "group_domain": "domain.com", "name": "domdev"},
+ "msg": DOMAIN_ALLOWED_COUNT.format("DNS", 1)},
+ {"module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"], "name": "domdev"},
+ "msg": GRP_DOM_MSG}, {"module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "domain.com", "name": "domdev", "network_timeout": 1},
+ "msg": TIMEOUT_RANGE.format("NetworkTimeOut", MIN_TIMEOUT, MAX_TIMEOUT)}, {
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "domain.com", "name": "domdev", "search_timeout": 301},
+ "msg": TIMEOUT_RANGE.format("SearchTimeOut", MIN_TIMEOUT, MAX_TIMEOUT)}, {
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "domain.com", "name": "domdev"}, "ad_cnt": 2,
+ "msg": MAX_AD_MSG.format(MAX_AD)}, {
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "domain.com", "name": "domdev", "validate_certificate": True,
+ "certificate_file": "nonexistingcert.crt"}, "msg": CERT_INVALID}, {
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "domain.com", "id": 1234, "validate_certificate": True,
+ "certificate_file": "nonexistingcert.crt"}, "msg": INVALID_ID}
+ ])
+ def test_ome_active_directory_create_fails(self, params, ome_connection_mock_for_ad, ome_response_mock,
+ ome_default_args, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = {"Name": "AD1"}
+ mocker.patch(MODULE_PATH + 'get_ad', return_value=(None, params.get("ad_cnt", 1)))
+ ome_default_args.update(params['module_args'])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == params['msg']
+
+ @pytest.mark.parametrize("params", [{
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "testconnectionfail.com", "name": "domdev", "test_connection": True,
+ "domain_username": "user", "domain_password": "passwd"},
+ "msg": "{0}{1}".format(TEST_CONNECTION_FAIL, "Unable to connect to the LDAP or AD server."), "is_http": True,
+ "error_info": {
+ "error": {"@Message.ExtendedInfo": [{"Message": "Unable to connect to the LDAP or AD server."}], }}}, {
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "testconnectionfail.com", "name": "domdev", "test_connection": True,
+ "domain_username": "user", "domain_password": "passwd"},
+ "msg": "{0}{1}".format(TEST_CONNECTION_FAIL, ERR_READ_FAIL), "is_http": True, "error_info": {
+ "error1": {"@Message.ExtendedInfo": [{"Message": "Unable to connect to the LDAP or AD server."}], }}}, {
+ "module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
+ "group_domain": "testconnectionfail.com", "name": "domdev", "test_connection": True,
+ "domain_username": "user", "domain_password": "passwd"},
+ "msg": "{0}{1}".format(TEST_CONNECTION_FAIL, "Exception occurrence success."),
+ "error_info": "Exception occurrence success."}, ])
+    def test_ome_active_directory_create_test_connection_fail(self, params, ome_default_args, mocker):
+ mocker.patch(MODULE_PATH + 'get_ad', return_value=(None, params.get("ad_cnt", 1)))
+ rest_obj_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = rest_obj_class_mock.return_value.__enter__.return_value
+ if params.get("is_http"):
+ json_str = to_text(json.dumps(params['error_info']))
+ ome_connection_mock_obj.invoke_request.side_effect = HTTPError('http://testdellemcomead.com', 404,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ else:
+ ome_connection_mock_obj.invoke_request.side_effect = Exception(params['error_info'])
+ ome_default_args.update(params['module_args'])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == params['msg']
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_active_directory_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_ad, ome_response_mock):
+ ome_default_args.update({"state": "absent", "name": "t1"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'get_ad', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'get_ad', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'get_ad', side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_smtp.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_smtp.py
new file mode 100644
index 00000000..b5bc1d94
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_smtp.py
@@ -0,0 +1,457 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 4.3.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_alerts_smtp
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \
+ AnsibleFailJSonException
+
+SUCCESS_MSG = "Successfully updated the SMTP settings."
+SMTP_URL = "AlertService/AlertDestinations/SMTPConfiguration"
+NO_CHANGES = "No changes found to be applied."
+CHANGES_FOUND = "Changes found to be applied."
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_application_alerts_smtp.'
+MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.ome.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_smtp(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestAppAlertsSMTP(FakeAnsibleModule):
+ module = ome_application_alerts_smtp
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "destination_address": "localhost", "port_number": 25, "use_ssl": True,
+ "enable_authentication": True,
+ "credentials": {"username": "username", "password": "password"}
+ },
+ "json_data": {
+ "@odata.context": "/api/$metadata#Collection(AlertDestinations.SMTPConfiguration)",
+ "@odata.count": 1,
+ "value": [
+ {
+ "@odata.type": "#AlertDestinations.SMTPConfiguration",
+ "DestinationAddress": "localhost",
+ "UseCredentials": True,
+ "PortNumber": 25,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": ""
+ }
+ }
+ ]
+ }
+ }
+ ])
+ def test_fetch_smtp_settings(self, params, ome_connection_mock_for_smtp, ome_response_mock):
+ ome_response_mock.success = True
+ f_module = self.get_module_mock(params=params['module_args'])
+ ome_response_mock.json_data = params["json_data"]
+ ret_data = self.module.fetch_smtp_settings(ome_connection_mock_for_smtp)
+ assert ret_data.get("DestinationAddress") == "localhost"
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "destination_address": "localhost", "port_number": 25, "use_ssl": True,
+ "enable_authentication": True,
+ "credentials": {"username": "username", "password": "password"}
+ },
+
+ "json_data": {
+ "DestinationAddress": "localhost",
+ "PortNumber": 25,
+ "UseCredentials": True,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": None
+ }
+ },
+ "payload": {
+ "DestinationAddress": "localhost",
+ "UseCredentials": True,
+ "PortNumber": 25,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": "password"
+ }
+ }
+ }
+ ])
+ def test_update_smtp_settings(self, params, ome_connection_mock_for_smtp, ome_response_mock):
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 201
+ f_module = self.get_module_mock(params=params['module_args'])
+ ome_response_mock.json_data = params["json_data"]
+ payload = params["payload"]
+ ret_data = self.module.update_smtp_settings(ome_connection_mock_for_smtp, payload)
+ assert ret_data.json_data.get("DestinationAddress") == "localhost"
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "destination_address": "localhost", "port_number": 25, "use_ssl": True,
+ "enable_authentication": True,
+ "credentials": {"username": "username", "password": "password"}
+ },
+ "payload": {
+ "DestinationAddress": "localhost",
+ "UseCredentials": True,
+ "PortNumber": 25,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": None
+ }
+ }
+ }
+ ])
+ def test_update_payload_auth(self, params, ome_connection_mock_for_smtp, ome_response_mock):
+ f_module = self.get_module_mock(params=params['module_args'])
+ payload = params["payload"]
+ ret_data = self.module.update_payload(f_module, payload)
+ assert ret_data.get("DestinationAddress") == "localhost"
+ assert ret_data.get("UseCredentials") is True
+ assert ret_data.get("Credential") is not None
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "destination_address": "localhost", "port_number": 25, "use_ssl": True,
+ "enable_authentication": False,
+ "credentials": {"username": "username", "password": "password"}
+ },
+ "payload": {
+ "DestinationAddress": "localhost",
+ "UseCredentials": True,
+ "PortNumber": 25,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": None
+ }
+ }
+ }
+ ])
+ def test_update_payload_without_auth(self, params, ome_connection_mock_for_smtp, ome_response_mock):
+ f_module = self.get_module_mock(params=params['module_args'])
+ payload = params["payload"]
+ ret_data = self.module.update_payload(f_module, payload)
+ assert ret_data.get("DestinationAddress") == "localhost"
+ assert ret_data.get("UseCredentials") is False
+ assert ret_data.get("Credential") is None
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "destination_address": "localhost", "port_number": 25, "use_ssl": True,
+ "enable_authentication": False,
+ "credentials": {"username": "username", "password": "password"}
+ },
+ "payload": {
+ "DestinationAddress": "",
+ "UseCredentials": True,
+ "PortNumber": 26,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": None
+ }
+ }
+ },
+ {
+ "module_args": {
+ "destination_address": "localhost", "use_ssl": True,
+ "enable_authentication": False,
+ "credentials": {"username": "username", "password": "password"}
+ },
+ "payload": {
+ "DestinationAddress": "",
+ "UseCredentials": True,
+ "PortNumber": 25,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": None
+ }
+ }
+ },
+ ])
+ def test_get_value(self, params, ome_connection_mock_for_smtp, ome_response_mock):
+ f_module = self.get_module_mock(params=params['module_args'])
+ payload = params["payload"]
+ ret_data = self.module.get_value(f_module, payload, "port_number", "PortNumber")
+ assert ret_data == 25
+
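+    # _diff_payload is exercised with identical payloads (no difference reported), a payload
+    # missing the Credential block (difference reported), and None inputs (no difference).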
+ @pytest.mark.parametrize("params", [
+ {
+ "payload1": {
+ "DestinationAddress": "localhost",
+ "UseCredentials": True,
+ "PortNumber": 25,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": "password"
+ }
+ },
+ "payload2": {
+ "DestinationAddress": "localhost",
+ "UseCredentials": True,
+ "PortNumber": 25,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": "password"
+ }
+ }
+ },
+ ])
+ def test_diff_payload_same(self, params, ome_connection_mock_for_smtp, ome_response_mock):
+ payload1 = params["payload1"]
+ payload2 = params["payload2"]
+ diff = self.module._diff_payload(payload1, payload2)
+ assert diff == 0
+
+ @pytest.mark.parametrize("params", [
+ {
+ "payload1": {
+ "DestinationAddress": "localhost",
+ "UseCredentials": True,
+ "PortNumber": 25,
+ "UseSSL": True,
+ },
+ "payload2": {
+ "DestinationAddress": "localhost",
+ "UseCredentials": True,
+ "PortNumber": 25,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": "password"
+ }
+ }
+ },
+ ])
+ def test_diff_payload_diff(self, params, ome_connection_mock_for_smtp, ome_response_mock):
+ payload1 = params["payload1"]
+ payload2 = params["payload2"]
+ diff = self.module._diff_payload(payload1, payload2)
+ assert diff is True
+
+ def test_diff_payload_none(self, ome_connection_mock_for_smtp, ome_response_mock):
+ diff = self.module._diff_payload(None, None)
+ assert diff is False
+
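+ # Full-module success path with fetch_smtp_settings, update_payload, and _diff_payload mocked.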
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "destination_address": "localhost", "port_number": 25, "use_ssl": True,
+ "enable_authentication": True,
+ "credentials": {"username": "username", "password": "password"}
+ },
+ "json_data": {
+ "DestinationAddress": "localhost1",
+ "PortNumber": 25,
+ "UseCredentials": True,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": None
+ }
+ },
+ }
+ ])
+ def test_module_success(self, mocker, params, ome_connection_mock_for_smtp, ome_response_mock, ome_default_args):
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 201
+ ome_response_mock.json_data = params["json_data"]
+ ome_default_args.update(params['module_args'])
+
+ get_json_data = {
+ "DestinationAddress": "localhost",
+ "UseCredentials": True,
+ "PortNumber": 25,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": ""
+ }
+ }
+
+ update_json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + 'fetch_smtp_settings', return_value=get_json_data)
+ mocker.patch(MODULE_PATH + 'update_payload', return_value=update_json_data)
+ mocker.patch(MODULE_PATH + '_diff_payload', return_value=1)
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == SUCCESS_MSG
+
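+ # Success path when the existing settings carry no Credential block.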
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "destination_address": "localhost", "port_number": 25, "use_ssl": True,
+ "enable_authentication": True,
+ "credentials": {"username": "username", "password": "password"}
+ },
+ "json_data": {
+ "DestinationAddress": "localhost1",
+ "PortNumber": 25,
+ "UseCredentials": True,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": None
+ }
+ },
+ }
+ ])
+ def test_module_success_no_auth(self, mocker, params, ome_connection_mock_for_smtp, ome_response_mock,
+ ome_default_args):
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 201
+ ome_response_mock.json_data = params["json_data"]
+ ome_default_args.update(params['module_args'])
+
+ get_json_data = {
+ "DestinationAddress": "localhost",
+ "UseCredentials": True,
+ "PortNumber": 25,
+ "UseSSL": False
+ }
+
+ update_json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + 'fetch_smtp_settings', return_value=get_json_data)
+ mocker.patch(MODULE_PATH + 'update_payload', return_value=update_json_data)
+ mocker.patch(MODULE_PATH + '_diff_payload', return_value=1)
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == SUCCESS_MSG
+
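+ # Idempotent run: _diff_payload reports no change, so the module returns NO_CHANGES.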
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "destination_address": "localhost", "port_number": 25, "use_ssl": True,
+ "enable_authentication": True,
+ "credentials": {"username": "username", "password": "password"}
+ },
+ "json_data": {
+ "DestinationAddress": "localhost1",
+ "PortNumber": 25,
+ "UseCredentials": True,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": None
+ }
+ },
+ }
+ ])
+ def test_module_idempotent(self, mocker, params, ome_connection_mock_for_smtp, ome_response_mock, ome_default_args):
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 201
+ ome_response_mock.json_data = params["json_data"]
+ ome_default_args.update(params['module_args'])
+ get_json_data = params["json_data"]
+ update_json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + 'fetch_smtp_settings', return_value=get_json_data)
+ mocker.patch(MODULE_PATH + 'update_payload', return_value=update_json_data)
+ mocker.patch(MODULE_PATH + '_diff_payload', return_value=0)
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == NO_CHANGES
+
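+ # process_check_mode exits with CHANGES_FOUND or NO_CHANGES depending on the diff count and check_mode flag.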
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "destination_address": "localhost", "port_number": 25, "use_ssl": True,
+ "enable_authentication": True,
+ "credentials": {"username": "username", "password": "password"}
+ },
+ "json_data": {
+ "DestinationAddress": "localhost1",
+ "PortNumber": 25,
+ "UseCredentials": True,
+ "UseSSL": True,
+ "Credential": {
+ "User": "username",
+ "Password": None
+ }
+ },
+ }
+ ])
+ def test_module_check_mode(self, mocker, params, ome_connection_mock_for_smtp, ome_response_mock,
+ ome_default_args):
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 201
+ ome_response_mock.json_data = params["json_data"]
+ ome_default_args.update(params['module_args'])
+ f_module = self.get_module_mock(params=ome_default_args)
+ get_json_data = params["json_data"]
+ update_json_data = params["json_data"]
+
+ f_module.check_mode = True
+
+ with pytest.raises(Exception) as err:
+ self.module.process_check_mode(f_module, 0)
+ assert err.value.args[0] == NO_CHANGES
+
+ with pytest.raises(Exception) as err:
+ self.module.process_check_mode(f_module, 1)
+ assert err.value.args[0] == CHANGES_FOUND
+
+ f_module.check_mode = False
+
+ with pytest.raises(Exception) as err:
+ self.module.process_check_mode(f_module, 0)
+ assert err.value.args[0] == NO_CHANGES
+
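+ # URLError is reported as unreachable; HTTPError surfaces as a failed result through fail_json.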
+ @pytest.mark.parametrize("exc_type",
+ [HTTPError, URLError])
+ def test_smtp_main_exception_case(self, mocker, exc_type, ome_connection_mock_for_smtp, ome_response_mock,
+ ome_default_args):
+ ome_default_args.update({"destination_address": "localhost", "port_number": 25, "use_ssl": True,
+ "enable_authentication": True,
+ "credentials": {"username": "username", "password": "password"}
+ })
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'fetch_smtp_settings', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'fetch_smtp_settings', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'fetch_smtp_settings',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_syslog.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_syslog.py
new file mode 100644
index 00000000..ea4551d9
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_syslog.py
@@ -0,0 +1,248 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 4.3.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_alerts_syslog
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_application_alerts_syslog.'
+
+SUCCESS_MSG = "Successfully updated the syslog forwarding settings."
+DUP_ID_MSG = "Duplicate server IDs are provided."
+NO_CHANGES_MSG = "No changes found to be applied."
+CHANGES_FOUND = "Changes found to be applied."
+
+
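+# Patches RestOME in the syslog module so invoke_request returns the shared response mock.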
+@pytest.fixture
+def ome_connection_mock_for_syslog(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeAlertSyslog(FakeAnsibleModule):
+ module = ome_application_alerts_syslog
+
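+ # Covers idempotent input, an update, check mode, an empty server list, and duplicate server IDs.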
+ @pytest.mark.parametrize("params", [
+ {"module_args": {
+ "syslog_servers": [
+ {
+ "destination_address": "192.168.10.41",
+ "enabled": True,
+ "id": 1,
+ "port_number": 514
+ },
+ {
+ "destination_address": "192.168.10.46",
+ "enabled": False,
+ "id": 2,
+ "port_number": 514
+ },
+ {
+ "destination_address": "192.168.10.43",
+ "enabled": False,
+ "id": 3,
+ "port_number": 514
+ },
+ {
+ "destination_address": "192.168.10.44",
+ "enabled": True,
+ "id": 4,
+ "port_number": 514
+ }
+ ]
+ }, "json_data": {
+ "@odata.context": "/api/$metadata#Collection(AlertDestinations.SyslogConfiguration)",
+ "@odata.count": 4,
+ "value": [
+ {
+ "@odata.type": "#AlertDestinations.SyslogConfiguration",
+ "Id": 1,
+ "Enabled": True,
+ "DestinationAddress": "192.168.10.41",
+ "PortNumber": 514
+ },
+ {
+ "@odata.type": "#AlertDestinations.SyslogConfiguration",
+ "Id": 2,
+ "Enabled": False,
+ "DestinationAddress": "192.168.10.46",
+ "PortNumber": 0
+ },
+ {
+ "@odata.type": "#AlertDestinations.SyslogConfiguration",
+ "Id": 3,
+ "Enabled": False,
+ "DestinationAddress": "192.168.10.43",
+ "PortNumber": 514
+ },
+ {
+ "@odata.type": "#AlertDestinations.SyslogConfiguration",
+ "Id": 4,
+ "Enabled": True,
+ "DestinationAddress": "192.168.10.44",
+ "PortNumber": 514
+ }
+ ]
+ }, "msg": NO_CHANGES_MSG},
+ {"module_args": {
+ "syslog_servers": [
+ {
+ "destination_address": "192.168.10.41",
+ "enabled": True,
+ "id": 1,
+ "port_number": 514
+ },
+ {
+ "destination_address": "192.168.10.46",
+ "enabled": False,
+ "id": 2,
+ "port_number": 514
+ }
+ ]
+ }, "json_data": {
+ "@odata.context": "/api/$metadata#Collection(AlertDestinations.SyslogConfiguration)",
+ "@odata.count": 4,
+ "value": [
+ {
+ "@odata.type": "#AlertDestinations.SyslogConfiguration",
+ "Id": 1,
+ "Enabled": True,
+ "DestinationAddress": "192.168.10.41",
+ "PortNumber": 511
+ },
+ {
+ "@odata.type": "#AlertDestinations.SyslogConfiguration",
+ "Id": 2,
+ "Enabled": True,
+ "DestinationAddress": "192.168.10.46",
+ "PortNumber": 514
+ }
+ ]
+ }, "msg": SUCCESS_MSG},
+ {"check_mode": True, "module_args": {
+ "syslog_servers": [
+ {
+ "destination_address": "192.168.10.41",
+ "enabled": True,
+ "id": 1,
+ "port_number": 514
+ },
+ {
+ "destination_address": "192.168.10.46",
+ "enabled": False,
+ "id": 2,
+ "port_number": 514
+ }
+ ]
+ }, "json_data": {
+ "@odata.context": "/api/$metadata#Collection(AlertDestinations.SyslogConfiguration)",
+ "@odata.count": 4,
+ "value": [
+ {
+ "@odata.type": "#AlertDestinations.SyslogConfiguration",
+ "Id": 1,
+ "Enabled": True,
+ "DestinationAddress": "192.168.10.41",
+ "PortNumber": 511
+ },
+ {
+ "@odata.type": "#AlertDestinations.SyslogConfiguration",
+ "Id": 2,
+ "Enabled": True,
+ "DestinationAddress": "192.168.10.46",
+ "PortNumber": 514
+ }
+ ]
+ }, "msg": CHANGES_FOUND},
+ {"module_args": {
+ "syslog_servers": []
+ }, "json_data": {}, "msg": NO_CHANGES_MSG},
+ {"module_args": {
+ "syslog_servers": [
+ {
+ "destination_address": "192.168.10.41",
+ "enabled": True,
+ "id": 1,
+ "port_number": 514
+ },
+ {
+ "destination_address": "192.168.10.46",
+ "enabled": False,
+ "id": 2,
+ "port_number": 514
+ },
+ {
+ "destination_address": "192.168.10.43",
+ "enabled": False,
+ "id": 3,
+ "port_number": 514
+ },
+ {
+ "destination_address": "192.168.10.44",
+ "enabled": True,
+ "id": 4,
+ "port_number": 514
+ },
+ {
+ "destination_address": "192.168.10.44",
+ "enabled": True,
+ "id": 4,
+ "port_number": 514
+ }
+ ]
+ }, "json_data": {
+ "@odata.context": "/api/$metadata#Collection(AlertDestinations.SyslogConfiguration)",
+ "@odata.count": 4,
+ "value": []
+ }, "msg": DUP_ID_MSG},
+ ])
+ def test_ome_alert_syslog_success(self, params, ome_connection_mock_for_syslog,
+ ome_response_mock, ome_default_args, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params.get("json_data")
+ ome_connection_mock_for_syslog.strip_substr_dict.return_value = params.get("json_data")
+ ome_default_args.update(params['module_args'])
+ result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['msg']
+
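+ # validate_input failures map to an unreachable result for URLError and a failed result for the other exception types.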
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLValidationError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_alert_syslog_main_exception_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_syslog, ome_response_mock):
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'validate_input', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'validate_input', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'validate_input',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py
new file mode 100644
index 00000000..c31983bc
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py
@@ -0,0 +1,122 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 2.1.3
+# Copyright (C) 2019-2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+
+import pytest
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ssl import SSLError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_certificate
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
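+# Patches RestOME in ome_application_certificate and returns the mocked connection object.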
+@pytest.fixture
+def ome_connection_mock_for_application_certificate(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(
+ MODULE_PATH + 'ome_application_certificate.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeAppCSR(FakeAnsibleModule):
+ module = ome_application_certificate
+
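+ # get_resource_parameters failures: URLError is unreachable, other exceptions fail without a csr_status key.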
+ @pytest.mark.parametrize("exc_type",
+ [ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_application_certificate_main_error_cases(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_application_certificate,
+ ome_response_mock):
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ args = {"command": "generate_csr", "distinguished_name": "hostname.com",
+ "department_name": "Remote Access Group", "business_name": "Dell Inc.",
+ "locality": "Round Rock", "country_state": "Texas", "country": "US",
+ "email": "support@dell.com"}
+ ome_default_args.update(args)
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'ome_application_certificate.get_resource_parameters',
+ side_effect=exc_type("TEST"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'ome_application_certificate.get_resource_parameters',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'ome_application_certificate.get_resource_parameters',
+ side_effect=exc_type('http://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'csr_status' not in result
+ assert 'msg' in result
+
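+ # generate_csr builds a POST to ApplicationService.GenerateCSR with the expected payload fields.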
+ def test_get_resource_parameters_generate(self, mocker, ome_default_args,
+ ome_connection_mock_for_application_certificate,
+ ome_response_mock):
+ args = {"command": "generate_csr", "distinguished_name": "hostname.com",
+ "department_name": "Remote Access Group", "business_name": "Dell Inc.",
+ "locality": "Round Rock", "country_state": "Texas", "country": "US",
+ "email": "support@dell.com"}
+ f_module = self.get_module_mock(params=args)
+ result = self.module.get_resource_parameters(f_module)
+ assert result[0] == "POST"
+ assert result[1] == "ApplicationService/Actions/ApplicationService.GenerateCSR"
+ assert result[2] == {'DistinguishedName': 'hostname.com', 'Locality': 'Round Rock',
+ 'DepartmentName': 'Remote Access Group', 'BusinessName': 'Dell Inc.',
+ 'State': 'Texas', 'Country': 'US', 'Email': 'support@dell.com'}
+
+ def test_upload_csr_fail01(self, mocker, ome_default_args, ome_connection_mock_for_application_certificate,
+ ome_response_mock):
+ args = {"command": "upload", "upload_file": "/path/certificate.cer"}
+ f_module = self.get_module_mock(params=args)
+ with pytest.raises(Exception) as exc:
+ self.module.get_resource_parameters(f_module)
+ assert exc.value.args[0] == "No such file or directory."
+
+ def test_upload_csr_success(self, mocker, ome_default_args, ome_connection_mock_for_application_certificate,
+ ome_response_mock):
+ payload = "--BEGIN-REQUEST--"
+ mocker.patch(MODULE_PATH + 'ome_application_certificate.get_resource_parameters',
+ return_value=("POST", "ApplicationService/Actions/ApplicationService.UploadCertificate", payload))
+ ome_default_args.update({"command": "upload", "upload_file": "/path/certificate.cer"})
+ ome_response_mock.success = True
+ result = self.execute_module(ome_default_args)
+ assert result['msg'] == "Successfully uploaded application certificate."
+
+ def test_generate_csr(self, mocker, ome_default_args, ome_connection_mock_for_application_certificate,
+ ome_response_mock):
+ csr_json = {"CertificateData": "--BEGIN-REQUEST--"}
+ payload = {"DistinguishedName": "hostname.com", "DepartmentName": "Remote Access Group",
+ "BusinessName": "Dell Inc.", "Locality": "Round Rock", "State": "Texas",
+ "Country": "US", "Email": "support@dell.com"}
+ mocker.patch(MODULE_PATH + 'ome_application_certificate.get_resource_parameters',
+ return_value=("POST", "ApplicationService/Actions/ApplicationService.GenerateCSR", payload))
+ ome_default_args.update({"command": "generate_csr", "distinguished_name": "hostname.com",
+ "department_name": "Remote Access Group", "business_name": "Dell Inc.",
+ "locality": "Round Rock", "country_state": "Texas", "country": "US",
+ "email": "support@dell.com"})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = csr_json
+ result = self.execute_module(ome_default_args)
+ assert result['msg'] == "Successfully generated certificate signing request."
+ assert result['csr_status'] == {'CertificateData': '--BEGIN-REQUEST--'}
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py
new file mode 100644
index 00000000..3a86a3f0
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py
@@ -0,0 +1,2240 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ssl import SSLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_console_preferences
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \
+ AnsibleFailJSonException
+
+SUCCESS_MSG = "Successfully updated the Console Preferences settings."
+SETTINGS_URL = "ApplicationService/Settings"
+NO_CHANGES = "No changes found to be applied."
+CHANGES_FOUND = "Changes found to be applied."
+HEALTH_CHECK_UNIT_REQUIRED = "The health check unit is required when health check interval is specified."
+HEALTH_CHECK_INTERVAL_REQUIRED = "The health check interval is required when health check unit is specified."
+HEALTH_CHECK_INTERVAL_INVALID = "The health check interval specified is invalid for the {0}"
+JOB_URL = "JobService/Jobs"
+CIFS_URL = "ApplicationService/Actions/ApplicationService.UpdateShareTypeSettings"
+CONSOLE_SETTINGS_VALUES = ["DATA_PURGE_INTERVAL", "EMAIL_SENDER", "TRAP_FORWARDING_SETTING",
+ "MX7000_ONBOARDING_PREF", "REPORTS_MAX_RESULTS_LIMIT",
+ "DISCOVERY_APPROVAL_POLICY", "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION",
+ "DEVICE_PREFERRED_NAME", "INVALID_DEVICE_HOSTNAME", "COMMON_MAC_ADDRESSES",
+ "CONSOLE_CONNECTION_SETTING", "MIN_PROTOCOL_VERSION", "SHARE_TYPE"]
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_application_console_preferences.'
+MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.ome.'
+
+
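+# Patches RestOME in ome_application_console_preferences and returns the mocked connection object.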
+@pytest.fixture
+def ome_connection_mock_for_application_console_preferences(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeAppConsolePreferences(FakeAnsibleModule):
+ module = ome_application_console_preferences
+
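+ # fetch_cp_settings returns the "value" list of console settings from the mocked response.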
+ @pytest.mark.parametrize("params", [{"module_args": {"report_row_limit": 123,
+ "mx7000_onboarding_preferences": "all",
+ "email_sender_settings": "admin@dell.com",
+ "trap_forwarding_format": "Normalized",
+ "metrics_collection_settings": 361},
+ "json_data": {"value": [
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""
+ },
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""
+ },
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ ]},
+ }])
+ def test_fetch_cp_settings(self, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock):
+ ome_response_mock.success = True
+ f_module = self.get_module_mock(params=params['module_args'])
+ ome_response_mock.json_data = params["json_data"]
+ ret_data = self.module.fetch_cp_settings(ome_connection_mock_for_application_console_preferences)
+ assert ret_data == params["json_data"]["value"]
+
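+ # job_details returns the Global Health Task job record from the mocked job list.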
+ @pytest.mark.parametrize("params", [{"module_args": {"device_health": {"health_check_interval": 55,
+ "health_check_interval_unit": "Minutes"}},
+ "json_data": {"@odata.type": "#JobService.Job",
+ "@odata.id": "/api/JobService/Jobs(10093)",
+ "Id": 10093,
+ "JobName": "Global Health Task",
+ "JobDescription": "Global Health Task",
+ "NextRun": "2022-03-15 05:25:00.0",
+ "LastRun": "2022-03-15 05:24:00.043",
+ "StartTime": None,
+ "EndTime": None,
+ "Schedule": "0 0/1 * 1/1 * ? *",
+ "State": "Enabled",
+ "CreatedBy": "admin",
+ "UpdatedBy": None,
+ "Visible": None,
+ "Editable": None,
+ "Builtin": False,
+ "UserGenerated": True,
+ "Targets": [{"JobId": 10093, "Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}],
+ "Params": [{"JobId": 10093, "Key": "metricType", "Value": "40, 50"}],
+ "LastRunStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2060, "Name": "Completed"},
+ "JobType": {"@odata.type": "#JobService.JobType", "Id": 6, "Name": "Health_Task", "Internal": False},
+ "JobStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2020, "Name": "Scheduled"},
+ "ExecutionHistories@odata.navigationLink": "/api/JobService/Jobs(10093)/ExecutionHistories",
+ "LastExecutionDetail": {"@odata.id": "/api/JobService/Jobs(10093)/LastExecutionDetail"}},
+ }])
+ def test_job_details(self, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock):
+ ome_response_mock.success = True
+ f_module = self.get_module_mock(params=params['module_args'])
+ ome_response_mock.json_data = {"value": [params["json_data"]]}
+ ret_data = self.module.job_details(ome_connection_mock_for_application_console_preferences)
+ assert ret_data == params["json_data"]
+
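+ # create_payload builds a name-keyed dictionary of the current console settings.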
+ @pytest.mark.parametrize("params",
+ [
+ {"module_args":
+ {
+ "report_row_limit": 123,
+ "mx7000_onboarding_preferences": "all",
+ "email_sender_settings": "admin@dell.com",
+ "trap_forwarding_format": "Normalized",
+ "metrics_collection_settings": 361
+ },
+ "payload":
+ {"ConsoleSetting":
+ [
+ {
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""
+ },
+ {
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "AsIs",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ {
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "SLOT_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_SYSTEM_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"
+ }
+ ]},
+ "curr_payload": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "TRAP_FORWARDING_SETTING":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "MX7000_ONBOARDING_PREF":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "REPORTS_MAX_RESULTS_LIMIT":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "EMAIL_SENDER":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "DISCOVERY_APPROVAL_POLICY":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""},
+ "DEVICE_PREFERRED_NAME":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "INVALID_DEVICE_HOSTNAME":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "COMMON_MAC_ADDRESSES":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "MIN_PROTOCOL_VERSION":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"},
+ "CONSOLE_CONNECTION_SETTING":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"},
+ "SHARE_TYPE":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}},
+ "json_data": {"value": [
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, }
+ ])
+ def test_create_payload_success(self, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args, mocker):
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [params["payload"]]}
+ f_module = self.get_module_mock(params=params['module_args'])
+ curr_payload = params["json_data"]["value"]
+ ret_payload, payload_dict = self.module.create_payload(f_module, curr_payload)
+ assert payload_dict == params["curr_payload"]
+
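+ # A metrics_collection_settings value maps to the DATA_PURGE_INTERVAL console setting in the payload.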
+ @pytest.mark.parametrize("params",
+ [
+ {"module_args":
+ {
+ "metrics_collection_settings": "361"
+ },
+ "payload":
+ {"ConsoleSetting":
+ [
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""
+ }
+ ]},
+ "curr_payload":
+ {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "DISCOVERY_APPROVAL_POLICY": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""},
+ "DEVICE_PREFERRED_NAME": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "INVALID_DEVICE_HOSTNAME": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "COMMON_MAC_ADDRESSES": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "MIN_PROTOCOL_VERSION": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"},
+ "CONSOLE_CONNECTION_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"},
+ "SHARE_TYPE": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}},
+ "json_data": {"value": [
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""
+ },
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""
+ },
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""
+ },
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"
+ },
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"
+ },
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"
+ },
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"
+ },
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"
+ },
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, }])
+ def test_create_payload_success_case02(self, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args, mocker):
+ ome_response_mock.success = True
+ # ome_response_mock.json_data = {"value": [params["payload"]]}
+ f_module = self.get_module_mock(params=params['module_args'])
+ curr_payload = params["json_data"]["value"]
+ ret_payload, payload_dict = self.module.create_payload(f_module, curr_payload)
+ assert ret_payload == params["payload"]
+
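+ # builtin_appliance_share cifs_options maps onto the MIN_PROTOCOL_VERSION console setting.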
+ @pytest.mark.parametrize("params", [{"module_args": {"builtin_appliance_share": {"share_options": "CIFS",
+ "cifs_options": "V1"}},
+ "payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V1",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"}]},
+ "curr_payload": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "DISCOVERY_APPROVAL_POLICY": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""},
+ "DEVICE_PREFERRED_NAME": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "INVALID_DEVICE_HOSTNAME": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "COMMON_MAC_ADDRESSES": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "MIN_PROTOCOL_VERSION": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"},
+ "CONSOLE_CONNECTION_SETTING": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"},
+ "SHARE_TYPE": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}},
+ "json_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""},
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"},
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"},
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, }])
+ def test_create_payload_success_case03(self, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args, mocker):
+ ome_response_mock.success = True
+ # ome_response_mock.json_data = {"value": [params["payload"]]}
+ f_module = self.get_module_mock(params=params['module_args'])
+ curr_payload = params["json_data"]["value"]
+ ret_payload, payload_dict = self.module.create_payload(f_module, curr_payload)
+ assert ret_payload == params["payload"]
+
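+ # update_console_preferences returns the settings, CIFS share, and health-task job responses.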
+ @pytest.mark.parametrize("params", [
+ {
+ "payload": {
+ "ConsoleSetting": [
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""
+ },
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "AsIs",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "SLOT_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_SYSTEM_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"
+ }
+ ]
+ },
+ "cifs_payload": {
+ "ConsoleSetting": [
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"
+ }
+ ]
+ },
+ "job_payload": {"Id": 0,
+ "JobName": "Global Health Task",
+ "JobDescription": "Global Health Task",
+ "Schedule": None,
+ "State": "Enabled",
+ "JobType": {"Id": 6, "Name": "Health_Task"},
+ "Params": [{"Key": "metricType", "Value": "40, 50"}],
+ "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]},
+ "job_data":
+ {
+ "@odata.type": "#JobService.Job",
+ "@odata.id": "/api/JobService/Jobs(10093)",
+ "Id": 10093,
+ "JobName": "Global Health Task",
+ "JobDescription": "Global Health Task",
+ "NextRun": "2022-03-15 05:25:00.0",
+ "LastRun": "2022-03-15 05:24:00.043",
+ "StartTime": None,
+ "EndTime": None,
+ "Schedule": "0 0/1 * 1/1 * ? *",
+ "State": "Enabled",
+ "CreatedBy": "admin",
+ "UpdatedBy": None,
+ "Visible": None,
+ "Editable": None,
+ "Builtin": False,
+ "UserGenerated": True,
+ "Targets": [{"JobId": 10093, "Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}],
+ "Params": [{"JobId": 10093, "Key": "metricType", "Value": "40, 50"}],
+ "LastRunStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2060, "Name": "Completed"},
+ "JobType": {"@odata.type": "#JobService.JobType", "Id": 6, "Name": "Health_Task",
+ "Internal": False},
+ "JobStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2020, "Name": "Scheduled"},
+ "ExecutionHistories@odata.navigationLink": "/api/JobService/Jobs(10093)/ExecutionHistories",
+ "LastExecutionDetail": {"@odata.id": "/api/JobService/Jobs(10093)/LastExecutionDetail"}},
+ "payload_dict": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""
+ },
+ "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""
+ },
+ "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ "DISCOVERY_APPROVAL_POLICY": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""},
+ "DEVICE_PREFERRED_NAME": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "INVALID_DEVICE_HOSTNAME": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "COMMON_MAC_ADDRESSES": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "MIN_PROTOCOL_VERSION": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"},
+ "CONSOLE_CONNECTION_SETTING": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"},
+ "SHARE_TYPE": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}
+
+ },
+ "schedule": None,
+ "module_args": {
+ "report_row_limit": 123,
+ }
+ }
+ ])
+ def test_update_console_preferences(self, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args):
+ ome_response_mock.success = True
+ ome_default_args.update(params["module_args"])
+ # ome_response_mock.json_data = {"value": [params["payload"]]}
+ f_module = self.get_module_mock(params=params['module_args'])
+ final_resp, cifs_resp, job_resp = self.module.update_console_preferences(f_module, ome_connection_mock_for_application_console_preferences,
+ params["payload"], params["cifs_payload"],
+ params["job_payload"], params["job_data"],
+ params["payload_dict"], params["schedule"])
+ assert final_resp.status_code == 200
+
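+ # Variant with a cron schedule and builtin_appliance_share share/CIFS options.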
+ @pytest.mark.parametrize("params", [
+ {
+ "payload": {
+ "ConsoleSetting": [
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""
+ },
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "AsIs",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "SLOT_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_SYSTEM_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"
+ }
+ ]
+ },
+ "cifs_payload": {
+ "ConsoleSetting": [
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"
+ }
+ ]
+ },
+ "job_payload": {"Id": 0,
+ "JobName": "Global Health Task",
+ "JobDescription": "Global Health Task",
+ "Schedule": None,
+ "State": "Enabled",
+ "JobType": {"Id": 6, "Name": "Health_Task"},
+ "Params": [{"Key": "metricType", "Value": "40, 50"}],
+ "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]},
+ "job_data":
+ {
+ "@odata.type": "#JobService.Job",
+ "@odata.id": "/api/JobService/Jobs(10093)",
+ "Id": 10093,
+ "JobName": "Global Health Task",
+ "JobDescription": "Global Health Task",
+ "NextRun": "2022-03-15 05:25:00.0",
+ "LastRun": "2022-03-15 05:24:00.043",
+ "StartTime": None,
+ "EndTime": None,
+ "Schedule": "0 0/1 * 1/1 * ? *",
+ "State": "Enabled",
+ "CreatedBy": "admin",
+ "UpdatedBy": None,
+ "Visible": None,
+ "Editable": None,
+ "Builtin": False,
+ "UserGenerated": True,
+ "Targets": [{"JobId": 10093, "Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}],
+ "Params": [{"JobId": 10093, "Key": "metricType", "Value": "40, 50"}],
+ "LastRunStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2060, "Name": "Completed"},
+ "JobType": {"@odata.type": "#JobService.JobType", "Id": 6, "Name": "Health_Task",
+ "Internal": False},
+ "JobStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2020, "Name": "Scheduled"},
+ "ExecutionHistories@odata.navigationLink": "/api/JobService/Jobs(10093)/ExecutionHistories",
+ "LastExecutionDetail": {"@odata.id": "/api/JobService/Jobs(10093)/LastExecutionDetail"}},
+ "payload_dict": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "DISCOVERY_APPROVAL_POLICY": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""},
+ "DEVICE_PREFERRED_NAME": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "INVALID_DEVICE_HOSTNAME": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "COMMON_MAC_ADDRESSES": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "MIN_PROTOCOL_VERSION": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"},
+ "CONSOLE_CONNECTION_SETTING": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"},
+ "SHARE_TYPE": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}
+ },
+ "schedule": "0 0 0/5 1/1 * ? *",
+ "module_args": {
+ "builtin_appliance_share": {"share_options": "HTTPS", "cifs_options": "V2"}
+ }
+ }
+ ])
+ def test_update_console_preferences_case02(self, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args):
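+        """Updating builtin_appliance_share options should return a successful CIFS settings response."""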
+ ome_response_mock.success = True
+ ome_default_args.update(params["module_args"])
+ f_module = self.get_module_mock(params=params['module_args'])
+ final_resp, cifs_resp, job_resp = self.module.update_console_preferences(f_module,
+ ome_connection_mock_for_application_console_preferences,
+ params["payload"],
+ params["cifs_payload"],
+ params["job_payload"],
+ params["job_data"],
+ params["payload_dict"],
+ params["schedule"])
+ assert cifs_resp.success is True
+
+ @pytest.mark.parametrize("params", [{"payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "AsIs",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "SLOT_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_SYSTEM_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"}]},
+ "cifs_payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]},
+ "job_payload": {"Id": 0,
+ "JobName": "Global Health Task",
+ "JobDescription": "Global Health Task",
+ "Schedule": "0 0 0/5 1/1 * ? *",
+ "State": "Enabled",
+ "JobType": {"Id": 6, "Name": "Health_Task"},
+ "Params": [{"Key": "metricType", "Value": "40, 50"}],
+ "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]},
+ "job_data": {"@odata.type": "#JobService.Job",
+ "@odata.id": "/api/JobService/Jobs(10093)",
+ "Id": 10093,
+ "JobName": "Global Health Task",
+ "JobDescription": "Global Health Task",
+ "NextRun": "2022-03-15 05:25:00.0",
+ "LastRun": "2022-03-15 05:24:00.043",
+ "StartTime": None,
+ "EndTime": None,
+ "Schedule": "0 0/1 * 1/1 * ? *",
+ "State": "Enabled",
+ "CreatedBy": "admin",
+ "UpdatedBy": None,
+ "Visible": None,
+ "Editable": None,
+ "Builtin": False,
+ "UserGenerated": True,
+ "Targets": [{"JobId": 10093, "Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}],
+ "Params": [{"JobId": 10093, "Key": "metricType", "Value": "40, 50"}],
+ "LastRunStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2060, "Name": "Completed"},
+ "JobType": {"@odata.type": "#JobService.JobType", "Id": 6, "Name": "Health_Task", "Internal": False},
+ "JobStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2020, "Name": "Scheduled"},
+ "ExecutionHistories@odata.navigationLink": "/api/JobService/Jobs(10093)/ExecutionHistories",
+ "LastExecutionDetail": {"@odata.id": "/api/JobService/Jobs(10093)/LastExecutionDetail"}},
+ "payload_dict": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "DISCOVERY_APPROVAL_POLICY": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_"
+ "DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""},
+ "DEVICE_PREFERRED_NAME": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "INVALID_DEVICE_HOSTNAME": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "COMMON_MAC_ADDRESSES": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "MIN_PROTOCOL_VERSION": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"},
+ "CONSOLE_CONNECTION_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"},
+ "SHARE_TYPE": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}},
+ "schedule": "0 0 0/5 1/1 * ? *",
+ "module_args": {"device_health": {"health_check_interval": 50,
+ "health_check_interval_unit": "Minutes"}}}])
+ def test_update_console_preferences_case03(self, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args):
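+        """Updating device_health settings should return a successful health-task job response."""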
+ ome_response_mock.success = True
+ ome_default_args.update(params["module_args"])
+ f_module = self.get_module_mock(params=params['module_args'])
+ final_resp, cifs_resp, job_resp = self.module.update_console_preferences(f_module,
+ ome_connection_mock_for_application_console_preferences,
+ params["payload"],
+ params["cifs_payload"],
+ params["job_payload"],
+ params["job_data"],
+ params["payload_dict"],
+ params["schedule"])
+ assert job_resp.success is True
+
+ @pytest.mark.parametrize("params", [{"module_args": {"report_row_limit": 123},
+ "payload": {"ConsoleSetting": [{"Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "AsIs",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "SLOT_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_SYSTEM_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"}]},
+ "curr_payload": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "DISCOVERY_APPROVAL_POLICY": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_"
+ "DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""},
+ "DEVICE_PREFERRED_NAME": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "INVALID_DEVICE_HOSTNAME": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "COMMON_MAC_ADDRESSES": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "MIN_PROTOCOL_VERSION": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"},
+ "CONSOLE_CONNECTION_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"},
+ "SHARE_TYPE": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}},
+ "json_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, }])
+ def test_create_payload_dict(self, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args, mocker):
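+        """create_payload_dict should key the current console settings by their Name field."""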
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [params["payload"]]}
+ f_module = self.get_module_mock(params=params['module_args'])
+ curr_payload = params["json_data"]["value"]
+ ret_payload = self.module.create_payload_dict(curr_payload)
+ assert ret_payload == params["curr_payload"]
+
+ @pytest.mark.parametrize("params", [{"module_args": {"builtin_appliance_share": {"share_options": "CIFS",
+ "cifs_options": "V2"}},
+ "payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]},
+ "curr_payload": {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "TRAP_FORWARDING_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "MX7000_ONBOARDING_PREF": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "REPORTS_MAX_RESULTS_LIMIT": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "EMAIL_SENDER": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "DISCOVERY_APPROVAL_POLICY": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_"
+ "DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""},
+ "DEVICE_PREFERRED_NAME": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "INVALID_DEVICE_HOSTNAME": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "COMMON_MAC_ADDRESSES": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "MIN_PROTOCOL_VERSION": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"},
+ "CONSOLE_CONNECTION_SETTING": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"},
+ "SHARE_TYPE": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}},
+ "json_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}, }])
+ def test_create_cifs_payload(self, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args, mocker):
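+        """create_cifs_payload should pick the built-in appliance share setting (SHARE_TYPE) from the current settings."""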
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [params["payload"]]}
+ f_module = self.get_module_mock(params=params['module_args'])
+ curr_payload = params["json_data"]["value"]
+ ret_payload = self.module.create_cifs_payload(ome_connection_mock_for_application_console_preferences,
+ curr_payload)
+ assert ret_payload.get("ConsoleSetting")[0]["Name"] == params["payload"]["ConsoleSetting"][0]["Name"]
+
+ @pytest.mark.parametrize("params", [{"module_args": {"device_health": {"health_check_interval": 50,
+ "health_check_interval_unit": "Minutes"}},
+ "job_payload": {"Id": 0,
+ "JobName": "Global Health Task",
+ "JobDescription": "Global Health Task",
+ "Schedule": None,
+ "State": "Enabled",
+ "JobType": {"Id": 6, "Name": "Health_Task"},
+ "Params": [{"Key": "metricType", "Value": "40, 50"}],
+ "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]}}])
+ def test_create_job(self, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args, mocker):
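+        """create_job should build the Global Health Task job payload."""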
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params["job_payload"]
+ ome_default_args.update(params['module_args'])
+ job_payload, schedule = self.module.create_job(ome_connection_mock_for_application_console_preferences)
+ assert job_payload == params["job_payload"]
+
+ @pytest.mark.parametrize("params", [{"module_args": {"device_health": {"health_check_interval": 5,
+ "health_check_interval_unit": "Hourly"}},
+ "job_payload": {"Id": 0,
+ "JobName": "Global Health Task",
+ "JobDescription": "Global Health Task",
+ "Schedule": "0 0 0/5 1/1 * ? *",
+ "State": "Enabled",
+ "JobType": {"Id": 6, "Name": "Health_Task"},
+ "Params": [{"Key": "metricType", "Value": "40, 50"}],
+ "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]},
+ "schedule": "0 0 0/5 1/1 * ? *"}])
+ def test_create_job_case02(self, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args, mocker):
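+        """A 5 Hourly health check interval should produce the expected hourly cron schedule."""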
+ ome_response_mock.success = True
+ f_module = self.get_module_mock(params=params['module_args'])
+ ome_response_mock.json_data = params["job_payload"]
+ ome_default_args.update(params['module_args'])
+ job_payload, schedule = self.module.create_job(f_module)
+ assert schedule == params["schedule"]
+
+ @pytest.mark.parametrize("params", [{"module_args": {"device_health": {"health_check_interval": 5,
+ "health_check_interval_unit": "Minutes"}},
+ "job_payload": {"Id": 0,
+ "JobName": "Global Health Task",
+ "JobDescription": "Global Health Task",
+ "Schedule": "0 0/5 * 1/1 * ? *",
+ "State": "Enabled",
+ "JobType": {"Id": 6, "Name": "Health_Task"},
+ "Params": [{"Key": "metricType", "Value": "40, 50"}],
+ "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]},
+ "schedule": "0 0/5 * 1/1 * ? *"}])
+ def test_create_job_case03(self, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args, mocker):
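+        """A 5 Minutes health check interval should produce the expected minute-based cron schedule."""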
+ ome_response_mock.success = True
+ f_module = self.get_module_mock(params=params['module_args'])
+ ome_response_mock.json_data = params["job_payload"]
+ ome_default_args.update(params['module_args'])
+ job_payload, schedule = self.module.create_job(f_module)
+ assert schedule == params["schedule"]
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {"metrics_collection_settings": 361},
+ "cifs_payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]},
+ "cp_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""}, ]},
+ "payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""}]}, }])
+ def test_module_idempotent(self, mocker, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args):
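+        """_diff_payload should report no difference when the requested values match the current settings."""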
+ curr_resp = params["cp_data"]["value"]
+ payload = params["payload"]
+ cifs_payload = params["cifs_payload"]
+ schedule = None
+ job = None
+ diff = self.module._diff_payload(curr_resp, payload, cifs_payload, schedule, job)
+ assert diff == 0
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {"metrics_collection_settings": 361},
+ "cifs_payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]},
+ "cp_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""}, ]},
+ "payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "365",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""}]}, }])
+ def test_module_idempotent_case02(self, mocker, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args):
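+        """_diff_payload should report a difference when a requested value differs from the current setting."""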
+ curr_resp = params["cp_data"]["value"]
+ payload = params["payload"]
+ cifs_payload = params["cifs_payload"]
+ schedule = None
+ job = None
+ diff = self.module._diff_payload(curr_resp, payload, cifs_payload, schedule, job)
+ assert diff == 1
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {"device_health": {"health_check_interval": 5,
+ "health_check_interval_unit": "Hourly"}},
+ "json_data": {"@odata.type": "#JobService.Job",
+ "@odata.id": "/api/JobService/Jobs(10093)",
+ "Id": 10093,
+ "JobName": "Global Health Task",
+ "JobDescription": "Global Health Task",
+ "NextRun": "2022-03-15 05:25:00.0",
+ "LastRun": "2022-03-15 05:24:00.043",
+ "StartTime": None,
+ "EndTime": None,
+ "Schedule": "0 0 0/5 1/1 * ? *",
+ "State": "Enabled",
+ "CreatedBy": "admin",
+ "UpdatedBy": None,
+ "Visible": None,
+ "Editable": None,
+ "Builtin": False,
+ "UserGenerated": True,
+ "Targets": [{"JobId": 10093, "Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}],
+ "Params": [{"JobId": 10093, "Key": "metricType", "Value": "40, 50"}],
+ "LastRunStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2060, "Name": "Completed"},
+ "JobType": {"@odata.type": "#JobService.JobType", "Id": 6, "Name": "Health_Task", "Internal": False},
+ "JobStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2020, "Name": "Scheduled"},
+ "ExecutionHistories@odata.navigationLink": "/api/JobService/Jobs(10093)/ExecutionHistories",
+ "LastExecutionDetail": {"@odata.id": "/api/JobService/Jobs(10093)/LastExecutionDetail"}},
+ "cp_data":
+ {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""}, ]},
+ "schedule": "0 0 0/5 1/1 * ? *",
+ "payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "365",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""}]},
+ "cifs_payload": {"ConsoleSetting": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}]}}])
+ def test_module_idempotent_case03(self, mocker, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args):
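+        """_diff_payload should detect a change when the payload and job schedule differ from the current state."""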
+ curr_resp = params["cp_data"]["value"]
+ payload = params["payload"]
+ cifs_payload = params["cifs_payload"]
+ schedule = params["schedule"]
+ job = params["json_data"]
+ diff = self.module._diff_payload(curr_resp, payload, cifs_payload, schedule, job)
+ assert diff == 1
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {"device_health": {"health_check_interval": 100,
+ "health_check_interval_unit": "Minutes"}
+ }}])
+ def test__validate_params_fail_case01(self, params, ome_connection_mock_for_application_console_preferences):
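+        """An out-of-range health_check_interval for the Minutes unit should fail validation."""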
+ health = params['module_args'].get("device_health").get("health_check_interval_unit")
+ f_module = self.get_module_mock(params=params['module_args'])
+ with pytest.raises(Exception) as exc:
+ self.module._validate_params(f_module)
+ assert exc.value.args[0] == HEALTH_CHECK_INTERVAL_INVALID.format(health)
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {"device_health": {"health_check_interval_unit": "Minutes"}
+ }}])
+ def test__validate_params_fail_case02(self, params, ome_connection_mock_for_application_console_preferences):
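+        """Omitting health_check_interval should fail validation with the interval-required message."""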
+ f_module = self.get_module_mock(params=params['module_args'])
+ with pytest.raises(Exception) as exc:
+ self.module._validate_params(f_module)
+ assert exc.value.args[0] == HEALTH_CHECK_INTERVAL_REQUIRED
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {"device_health": {"health_check_interval": 50}
+ }}])
+ def test__validate_params_fail_case03(self, params, ome_connection_mock_for_application_console_preferences):
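+        """Omitting health_check_interval_unit should fail validation with the unit-required message."""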
+ f_module = self.get_module_mock(params=params['module_args'])
+ with pytest.raises(Exception) as exc:
+ self.module._validate_params(f_module)
+ assert exc.value.args[0] == HEALTH_CHECK_UNIT_REQUIRED
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {"device_health": {"health_check_interval": 100,
+ "health_check_interval_unit": "Hourly"}
+ }}])
+ def test__validate_params_fail_case04(self, params, ome_connection_mock_for_application_console_preferences):
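+        """An out-of-range health_check_interval for the Hourly unit should fail validation."""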
+ health = params['module_args'].get("device_health").get("health_check_interval_unit")
+ f_module = self.get_module_mock(params=params['module_args'])
+ with pytest.raises(Exception) as exc:
+ self.module._validate_params(f_module)
+ assert exc.value.args[0] == HEALTH_CHECK_INTERVAL_INVALID.format(health)
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {"report_row_limit": 123,
+ "mx7000_onboarding_preferences": "all",
+ "email_sender_settings": "admin@dell.com",
+ "trap_forwarding_format": "Normalized",
+ "metrics_collection_settings": 361
+ },
+ "json_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""}, ]}, }])
+ def test_module_check_mode(self, mocker, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args):
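+        """process_check_mode should exit with NO_CHANGES for a zero diff and CHANGES_FOUND for a non-zero diff in check mode."""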
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 201
+ ome_response_mock.json_data = {"value": [params["json_data"]]}
+ ome_default_args.update(params['module_args'])
+ f_module = self.get_module_mock(params=ome_default_args)
+
+ f_module.check_mode = True
+
+ with pytest.raises(Exception) as err:
+ self.module.process_check_mode(f_module, 0)
+ assert err.value.args[0] == NO_CHANGES
+
+ with pytest.raises(Exception) as err:
+ self.module.process_check_mode(f_module, 1)
+ assert err.value.args[0] == CHANGES_FOUND
+
+ f_module.check_mode = False
+
+ with pytest.raises(Exception) as err:
+ self.module.process_check_mode(f_module, 0)
+ assert err.value.args[0] == NO_CHANGES
+
+ @pytest.mark.parametrize("params", [
+ {
+ "job_details": {
+ "@odata.type": "#JobService.Job",
+ "@odata.id": "/api/JobService/Jobs(10093)",
+ "Id": 10093,
+ "JobName": "Global Health Task",
+ "JobDescription": "Global Health Task",
+ "NextRun": "2022-03-15 05:25:00.0",
+ "LastRun": "2022-03-15 05:24:00.043",
+ "StartTime": None,
+ "EndTime": None,
+ "Schedule": "0 0/1 * 1/1 * ? *",
+ "State": "Enabled",
+ "CreatedBy": "admin",
+ "UpdatedBy": None,
+ "Visible": None,
+ "Editable": None,
+ "Builtin": False,
+ "UserGenerated": True,
+ "Targets": [{"JobId": 10093, "Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}],
+ "Params": [{"JobId": 10093, "Key": "metricType", "Value": "40, 50"}],
+ "LastRunStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2060, "Name": "Completed"},
+ "JobType": {"@odata.type": "#JobService.JobType", "Id": 6, "Name": "Health_Task", "Internal": False},
+ "JobStatus": {"@odata.type": "#JobService.JobStatus", "Id": 2020, "Name": "Scheduled"},
+ "ExecutionHistories@odata.navigationLink": "/api/JobService/Jobs(10093)/ExecutionHistories",
+ "LastExecutionDetail": {"@odata.id": "/api/JobService/Jobs(10093)/LastExecutionDetail"}
+ },
+ "job_payload": {"Id": 0,
+ "JobName": "Global Health Task",
+ "JobDescription": "Global Health Task",
+ "Schedule": None,
+ "State": "Enabled",
+ "JobType": {"Id": 6, "Name": "Health_Task"},
+ "Params": [{"Key": "metricType", "Value": "40, 50"}],
+ "Targets": [{"Id": 500, "Data": "", "TargetType": {"Id": 6000, "Name": "GROUP"}}]},
+ "cp_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""}, ]},
+ "payload_dict":
+ {"DATA_PURGE_INTERVAL": {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "TRAP_FORWARDING_SETTING":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "MX7000_ONBOARDING_PREF":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "REPORTS_MAX_RESULTS_LIMIT":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""},
+ "EMAIL_SENDER":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "DISCOVERY_APPROVAL_POLICY":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DISCOVERY_APPROVAL_POLICY",
+ "DefaultValue": "Automatic",
+ "Value": "Automatic",
+ "DataType": "java.lang.String",
+ "GroupName": ""},
+ "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "NODE_INITIATED_DISCOVERY_SET_TRAP_DESTINATION",
+ "DefaultValue": "false",
+ "Value": "true",
+ "DataType": "java.lang.Boolean",
+ "GroupName": ""},
+ "DEVICE_PREFERRED_NAME":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DEVICE_PREFERRED_NAME",
+ "DefaultValue": "HOST_NAME",
+ "Value": "PREFER_DNS,PREFER_IDRAC_HOSTNAME",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "INVALID_DEVICE_HOSTNAME":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "INVALID_DEVICE_HOSTNAME",
+ "DefaultValue": "",
+ "Value": "localhost",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "COMMON_MAC_ADDRESSES":
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "COMMON_MAC_ADDRESSES",
+ "DefaultValue": "",
+ "Value": "::",
+ "DataType": "java.lang.String",
+ "GroupName": "DISCOVERY_SETTING"},
+ "MIN_PROTOCOL_VERSION": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MIN_PROTOCOL_VERSION",
+ "DefaultValue": "V2",
+ "Value": "V2",
+ "DataType": "java.lang.String",
+ "GroupName": "CIFS_PROTOCOL_SETTINGS"},
+ "CONSOLE_CONNECTION_SETTING": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "CONSOLE_CONNECTION_SETTING",
+ "DefaultValue": "last_known",
+ "Value": "last_known",
+ "DataType": "java.lang.String",
+ "GroupName": "CONSOLE_CONNECTION_SETTING"},
+ "SHARE_TYPE": {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "SHARE_TYPE",
+ "DefaultValue": "CIFS",
+ "Value": "CIFS",
+ "DataType": "java.lang.String",
+ "GroupName": "BUILT_IN_APPLIANCE_SHARE_SETTINGS"}},
+ "payload":
+ {"ConsoleSetting":
+ [
+ {
+ "@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""
+ }]},
+ "cifs_payload":
+ {"ConsoleSetting": []},
+ "module_args": {"metrics_collection_settings": 300},
+ "json_data": {"value": [{"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "DATA_PURGE_INTERVAL",
+ "DefaultValue": "365",
+ "Value": "361",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""
+ },
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "TRAP_FORWARDING_SETTING",
+ "DefaultValue": "AsIs",
+ "Value": "Normalized",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "MX7000_ONBOARDING_PREF",
+ "DefaultValue": "all",
+ "Value": "all",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ },
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "REPORTS_MAX_RESULTS_LIMIT",
+ "DefaultValue": "0",
+ "Value": "123",
+ "DataType": "java.lang.Integer",
+ "GroupName": ""
+ },
+ {"@odata.type": "#ApplicationService.ConsoleSetting",
+ "Name": "EMAIL_SENDER",
+ "DefaultValue": "omcadmin@dell.com",
+ "Value": "admin@dell.com",
+ "DataType": "java.lang.String",
+ "GroupName": ""
+ }, ]}, }])
+ def test_module_success(self, mocker, params, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args):
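+        """The module should report the success message when the mocked helpers report a pending change."""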
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 201
+ ome_default_args.update(params['module_args'])
+ mocker.patch(MODULE_PATH + 'job_details', return_value=params["job_details"])
+ mocker.patch(MODULE_PATH + 'create_job', return_value=(None, None))
+ mocker.patch(MODULE_PATH + 'fetch_cp_settings', return_value=params["cp_data"]["value"])
+ mocker.patch(MODULE_PATH + 'create_payload', return_value=(params["payload"], params["payload_dict"]))
+ mocker.patch(MODULE_PATH + 'create_cifs_payload', return_value=params["cifs_payload"])
+ mocker.patch(MODULE_PATH + '_diff_payload', return_value=1)
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == SUCCESS_MSG
+
+ @pytest.mark.parametrize("exc_type", [HTTPError, URLError])
+ def test_cp_main_exception_case(self, mocker, exc_type, ome_connection_mock_for_application_console_preferences,
+ ome_response_mock, ome_default_args):
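+        """URLError should mark the module unreachable and HTTPError should result in a failure with a message."""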
+ ome_default_args.update({"device_health": {"health_check_interval": 65,
+ "health_check_interval_unit": "Minutes"}})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + '_validate_params', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + '_validate_params', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + '_validate_params',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_address.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_address.py
new file mode 100644
index 00000000..3938184e
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_address.py
@@ -0,0 +1,425 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.1.0
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import json
+import pytest
+from io import StringIO
+from ssl import SSLError
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_network_address
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_application_network_address(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_application_network_address.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeAppNetwork(FakeAnsibleModule):
+ module = ome_application_network_address
+
+ inp_param = {
+ "hostname": "192.1.2.3",
+ "password": "password",
+ "port": 443,
+ "username": "root",
+ "enable_nic": True,
+ "interface_name": "eth0",
+ "dns_configuration": {"dns_domain_name": "localdomain", "dns_name": "openmanage-enterprise",
+ "register_with_dns": False,
+ "use_dhcp_for_dns_domain_name": False},
+ "ipv4_configuration": {"enable": True, "enable_dhcp": True, "use_dhcp_for_dns_server_names": True,
+ "static_ip_address": "192.168.11.20", "static_subnet_mask": "255.255.255.0",
+ "static_gateway": "192.168.11.1", "static_preferred_dns_server": "192.168.11.2",
+ "static_alternate_dns_server": "192.168.11.3"},
+ "ipv6_configuration": {"enable": True, "enable_auto_configuration": True,
+ "static_alternate_dns_server": "2607:f2b1:f081:9:1c8c:f1c7:47e:f121",
+ "static_gateway": "0000::ffff",
+ "static_ip_address": "2607:f2b1:f081:9:1c8c:f1c7:47e:f120",
+ "static_preferred_dns_server": "2607:f2b1:f081:9:1c8c:f1c7:47e:f122",
+ "static_prefix_length": 0, "use_dhcp_for_dns_server_names": True},
+ "management_vlan": {"enable_vlan": False, "vlan_id": 0},
+ "reboot_delay": 1}
+ inp_param1 = {
+ "hostname": "192.1.2.3",
+ "password": "password",
+ "port": 443,
+ "username": "root",
+ "enable_nic": False
+ }
+ out_param = {"EnableNIC": False,
+ "InterfaceName": "eth0",
+ "PrimaryInterface": True,
+ "Ipv4Configuration": {"Enable": True, "EnableDHCP": True, "StaticIPAddress": "192.168.11.20",
+ "StaticSubnetMask": "255.255.255.0", "StaticGateway": "192.168.11.1",
+ "UseDHCPForDNSServerNames": True, "StaticPreferredDNSServer": "192.168.11.2",
+ "StaticAlternateDNSServer": "192.168.11.3"},
+ "Ipv6Configuration": {"Enable": True, "EnableAutoConfiguration": True,
+ "StaticIPAddress": "2607:f2b1:f081:9:1c8c:f1c7:47e:f120",
+ "StaticPrefixLength": 0, "StaticGateway": "0000::ffff",
+ "UseDHCPForDNSServerNames": True,
+ "StaticPreferredDNSServer": "2607:f2b1:f081:9:1c8c:f1c7:47e:f122",
+ "StaticAlternateDNSServer": "2607:f2b1:f081:9:1c8c:f1c7:47e:f121"},
+ "ManagementVLAN": {"EnableVLAN": False, "Id": 0},
+ "DnsConfiguration": {"RegisterWithDNS": False, "DnsName": "openmanage-enterprise",
+ "UseDHCPForDNSDomainName": False, "DnsDomainName": "localdomain"},
+ "Delay": 0
+ }
+
+ @pytest.mark.parametrize("addr_param", [{"in": inp_param, "out": out_param},
+ {"in": inp_param1, "out": out_param}])
+ def test_ome_application_network_address_main_success_case_01(self, mocker, ome_default_args, addr_param,
+ ome_connection_mock_for_application_network_address,
+ ome_response_mock):
+ IP_CONFIG = "ApplicationService/Network/AddressConfiguration"
+ JOB_IP_CONFIG = "ApplicationService/Network/AdapterConfigurations"
+ POST_IP_CONFIG = "ApplicationService/Actions/Network.ConfigureNetworkAdapter"
+ ome_default_args.update(addr_param["in"])
+ ipv4 = {"Enable": True, "EnableDHCP": True, "StaticIPAddress": "192.168.11.20",
+ "StaticSubnetMask": "255.255.255.0", "StaticGateway": "192.168.11.1",
+ "UseDHCPForDNSServerNames": True, "StaticPreferredDNSServer": "192.168.11.1",
+ "StaticAlternateDNSServer": ""}
+ ipv6 = {"Enable": False, "EnableAutoConfiguration": True, "StaticIPAddress": "",
+ "StaticPrefixLength": 0, "StaticGateway": "", "UseDHCPForDNSServerNames": True,
+ "StaticPreferredDNSServer": "", "StaticAlternateDNSServer": ""}
+ dns = {"RegisterWithDNS": False, "DnsName": "openmanage-enterprise",
+ "UseDHCPForDNSDomainName": False, "DnsDomainName": "localdomain"}
+ vlan = {"EnableVLAN": False, "Id": 1}
+ mocker.patch(MODULE_PATH + "ome_application_network_address.validate_input")
+ mocker.patch(MODULE_PATH + "ome_application_network_address.get_payload",
+ return_value=(ipv4, ipv6, dns, vlan))
+ mocker.patch(MODULE_PATH + "ome_application_network_address.get_updated_payload",
+ return_value=(addr_param["out"], "PUT", IP_CONFIG))
+ ome_response_mock.json_data = addr_param["out"]
+ ome_response_mock.success = True
+ mresult = self.execute_module(ome_default_args)
+ assert mresult['changed'] is True
+ assert "msg" in mresult
+ assert "network_configuration" in mresult and mresult["network_configuration"] == addr_param["out"]
+ assert mresult["msg"] == "Successfully triggered task to update network address configuration."
+
+ @pytest.mark.parametrize("addr_param", [{"in": inp_param, "out": out_param}])
+ def test_ome_application_network_address_main_success_case_02(self, mocker, ome_default_args, addr_param,
+ ome_connection_mock_for_application_network_address,
+ ome_response_mock):
+ POST_IP_CONFIG = "ApplicationService/Actions/Network.ConfigureNetworkAdapter"
+ ome_default_args.update(addr_param["in"])
+ ipv4 = {"Enable": True, "EnableDHCP": True, "StaticIPAddress": "192.168.11.20",
+ "StaticSubnetMask": "255.255.255.0", "StaticGateway": "192.168.11.1",
+ "UseDHCPForDNSServerNames": True, "StaticPreferredDNSServer": "192.168.11.1",
+ "StaticAlternateDNSServer": ""}
+ ipv6 = {"Enable": False, "EnableAutoConfiguration": True, "StaticIPAddress": "",
+ "StaticPrefixLength": 0, "StaticGateway": "", "UseDHCPForDNSServerNames": True,
+ "StaticPreferredDNSServer": "", "StaticAlternateDNSServer": ""}
+ dns = {"RegisterWithDNS": False, "DnsName": "openmanage-enterprise",
+ "UseDHCPForDNSDomainName": False, "DnsDomainName": "localdomain"}
+ vlan = {"EnableVLAN": False, "Id": 1}
+ mocker.patch(MODULE_PATH + "ome_application_network_address.validate_input")
+ mocker.patch(MODULE_PATH + "ome_application_network_address.get_payload",
+ return_value=(ipv4, ipv6, dns, vlan))
+ mocker.patch(MODULE_PATH + "ome_application_network_address.get_updated_payload",
+ return_value=(addr_param["out"], "POST", POST_IP_CONFIG))
+ ome_response_mock.json_data = addr_param["out"]
+ ome_response_mock.success = True
+ mresult = self.execute_module(ome_default_args)
+ assert mresult['changed'] is True
+ assert "msg" in mresult
+ assert "network_configuration" in mresult and mresult["network_configuration"] == addr_param["out"]
+ assert mresult["msg"] == "Successfully triggered job to update network address configuration."
+
+ @pytest.mark.parametrize("addr_param", [{"in": inp_param, "out": out_param}])
+ def test_get_payload(self, addr_param, ome_default_args):
+ ome_default_args.update(addr_param["in"])
+ f_module = self.get_module_mock(params=addr_param["in"])
+ ipv4_payload, ipv6_payload, dns_payload, vlan_payload = self.module.get_payload(f_module)
+ assert ipv4_payload == addr_param["out"]["Ipv4Configuration"]
+ assert ipv6_payload == addr_param["out"]["Ipv6Configuration"]
+ assert dns_payload == addr_param["out"]["DnsConfiguration"]
+ assert vlan_payload == addr_param["out"]["ManagementVLAN"]
+
+ @pytest.mark.parametrize("addr_param", [{"in": inp_param, "out": out_param}])
+ def test_get_updated_payload(self, mocker, ome_default_args, addr_param,
+ ome_connection_mock_for_application_network_address,
+ ome_response_mock):
+ ome_default_args.update(addr_param["in"])
+ f_module = self.get_module_mock(params=addr_param["in"])
+ ome_response_mock.json_data = {"value": [addr_param["out"]]}
+ ipv4 = {"Enable": True, "EnableDHCP": True, "StaticIPAddress": "192.168.11.20",
+ "StaticSubnetMask": "255.255.255.0", "StaticGateway": "192.168.11.1",
+ "UseDHCPForDNSServerNames": True, "StaticPreferredDNSServer": "192.168.11.2",
+ "StaticAlternateDNSServer": "192.168.11.3"}
+ ipv6 = {"Enable": True, "EnableAutoConfiguration": False,
+ "StaticIPAddress": "2607:f2b1:f081:9:1c8c:f1c7:47e:f12",
+ "StaticPrefixLength": 0, "StaticGateway": "0000::ffff", "UseDHCPForDNSServerNames": True,
+ "StaticPreferredDNSServer": "2607:f2b1:f081:9:1c8c:f1c7:47e:f122",
+ "StaticAlternateDNSServer": "2607:f2b1:f081:9:1c8c:f1c7:47e:f12"}
+ dns = {"RegisterWithDNS": False, "DnsName": "openmanage-enterprise",
+ "UseDHCPForDNSDomainName": False, "DnsDomainName": "localdomain"}
+ vlan = {"EnableVLAN": False, "Id": 1}
+ current_setting, method, uri = self.module.get_updated_payload(
+ ome_connection_mock_for_application_network_address, f_module, ipv4, ipv6, dns, vlan)
+ assert current_setting == addr_param["out"]
+
+ def test_get_updated_payload_when_same_setting_failure_case(self, ome_default_args,
+ ome_connection_mock_for_application_network_address,
+ ome_response_mock):
+ ipv4 = {"Enable": True, "EnableDHCP": True, "StaticIPAddress": "192.168.11.20",
+ "StaticSubnetMask": "255.255.255.0", "StaticGateway": "192.168.11.1",
+ "UseDHCPForDNSServerNames": True, "StaticPreferredDNSServer": "192.168.11.2",
+ "StaticAlternateDNSServer": "192.168.11.3"}
+ ipv6 = {"Enable": True, "EnableAutoConfiguration": True,
+ "StaticIPAddress": "2607:f2b1:f081:9:1c8c:f1c7:47e:f120",
+ "StaticPrefixLength": 0, "StaticGateway": "0000::ffff", "UseDHCPForDNSServerNames": True,
+ "StaticPreferredDNSServer": "2607:f2b1:f081:9:1c8c:f1c7:47e:f122",
+ "StaticAlternateDNSServer": "2607:f2b1:f081:9:1c8c:f1c7:47e:f121"}
+ dns = {"RegisterWithDNS": False, "DnsName": "openmanage-enterprise",
+ "UseDHCPForDNSDomainName": False, "DnsDomainName": "localdomain"}
+ vlan = {"EnableVLAN": False, "Id": 1}
+ current_setting = {"value": [{
+ "@odata.context": "/api/$metadata#Network.AddressConfiguration/$entity",
+ "@odata.type": "#Network.AddressConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/AddressConfiguration",
+ "EnableNIC": True,
+ "InterfaceName": "eth0",
+ "PrimaryInterface": True,
+ "Ipv4Configuration": ipv4,
+ "Ipv6Configuration": ipv6,
+ "DnsConfiguration": dns,
+ "ManagementVLAN": vlan,
+ "Delay": 0
+ }]}
+ ome_default_args.update({"enable_nic": True, "interface_name": "eth0"})
+ f_module = self.get_module_mock(params=ome_default_args)
+ ome_response_mock.json_data = current_setting
+ error_message = "No changes found to be applied."
+ with pytest.raises(Exception, match=error_message) as err:
+ self.module.get_updated_payload(ome_connection_mock_for_application_network_address, f_module, ipv4, ipv6,
+ dns, vlan)
+
+ @pytest.mark.parametrize("addr_param",
+ [{"in": inp_param["ipv4_configuration"], "out": out_param["Ipv4Configuration"]},
+ {"in": {"enable": True, "enable_auto_configuration": True,
+ "static_alternate_dns_server": "2607:f2b1:f081:9:1c8c:f1c7:47e:f121",
+ "static_gateway": "0000::ffff",
+ "static_ip_address": "2607:f2b1:f081:9:1c8c:f1c7:47e:f120",
+ "static_preferred_dns_server": "2607:f2b1:f081:9:1c8c:f1c7:47e:f122",
+ "static_prefix_length": 0, "use_dhcp_for_dns_server_names": True},
+ "out": {"Enable": True, "EnableAutoConfiguration": True,
+ "StaticIPAddress": "2607:f2b1:f081:9:1c8c:f1c7:47e:f120",
+ "StaticPrefixLength": 0, "StaticGateway": "0000::ffff",
+ "UseDHCPForDNSServerNames": True,
+ "StaticPreferredDNSServer": "2607:f2b1:f081:9:1c8c:f1c7:47e:f122",
+ "StaticAlternateDNSServer": "2607:f2b1:f081:9:1c8c:f1c7:47e:f121"}},
+ {"in": inp_param["dns_configuration"], "out": out_param["DnsConfiguration"]},
+ {"in": None, "out": None}])
+ def test_format_payload(self, addr_param):
+ result = self.module.format_payload(addr_param["in"])
+ assert result == addr_param["out"]
+
+ @pytest.mark.parametrize("addr_param", [{"in": inp_param},
+ {"in": {"dns_configuration": {"register_with_dns": True}}},
+ {"in": {"management_vlan": {"enable_vlan": True}}}
+ ])
+ def test_validate_input_success(self, addr_param):
+ f_module = self.get_module_mock(params=addr_param["in"])
+ self.module.validate_input(f_module)
+
+ def _test_validate_input_fail1(self, ome_default_args):
+ ome_default_args.update(
+ {"management_vlan": {"enable_vlan": True}, "dns_configuration": {"register_with_dns": True}})
+ f_module = self.get_module_mock(params=ome_default_args)
+ error_message = "The vLAN settings cannot be updated if the 'register_with_dns' is true. " \
+ "The 'register_with_dns' cannot be updated if vLAN settings change."
+ with pytest.raises(Exception, match=error_message) as err:
+ self.module.validate_input(f_module)
+
+ def test_validate_input_fail2(self, ome_default_args):
+ ome_default_args.update({"reboot_delay": -1})
+ f_module = self.get_module_mock(params=ome_default_args)
+ error_message = "Invalid value provided for 'reboot_delay'"
+ with pytest.raises(Exception, match=error_message) as err:
+ self.module.validate_input(f_module)
+
+ @pytest.mark.parametrize("addr_param", [{"in": "192.168.0.5", "out": True},
+ {"in": "2607:f2b1:f081:9:1c8c:f1c7:47e:f121", "out": False}])
+ def test_validate_ip_address(self, addr_param):
+ ret_val = self.module.validate_ip_address(addr_param["in"])
+ assert ret_val == addr_param["out"]
+
+ @pytest.mark.parametrize("addr_param", [{"in": "192.168.0.5", "out": False},
+ {"in": "2607:f2b1:f081:9:1c8c:f1c7:47e:f121", "out": True}])
+ def test_validate_ip_v6_address(self, addr_param):
+ ret_val = self.module.validate_ip_v6_address(addr_param["in"])
+ assert ret_val == addr_param["out"]
+
+ src_dict1 = {"Enable": False, "EnableDHCP": True, "UseDHCPForDNSServerNames": False,
+ "StaticGateway": "192.168.11.2",
+ "StaticIPAddress": "192.168.11.20", "StaticSubnetMask": "255.255.255.0",
+ "StaticPreferredDNSServer": "192.168.11.3", "EnableAutoConfiguration": True}
+ new_dict1 = {"Enable": True, "EnableDHCP": False, "StaticGateway": "192.168.11.1",
+ "UseDHCPForDNSServerNames": True, "StaticPreferredDNSServer": "192.168.11.2",
+ "StaticAlternateDNSServer": "192.168.11.3"}
+ src_dict2 = {"StaticIPAddress": "192.168.11.20", "StaticSubnetMask": "255.255.255.0",
+ "EnableAutoConfiguration": False}
+ new_dict2 = {"StaticIPAddress": "192.168.11.20", "StaticSubnetMask": "255.255.255.0"}
+
+ @pytest.mark.parametrize("addr_param", [{"src_dict": src_dict1, "new_dict": new_dict1, 'diff': 4},
+ {"src_dict": src_dict2, "new_dict": new_dict2, 'diff': False},
+ {"src_dict": src_dict2, "new_dict": {}, 'diff': 0},
+ {"src_dict": src_dict2, "new_dict": {"EnableDHCP": None}, 'diff': 0}
+ ])
+ def test_update_ipv4_payload(self, addr_param):
+ ret_val = self.module.update_ipv4_payload(addr_param["src_dict"], addr_param["new_dict"])
+ assert ret_val == addr_param['diff']
+
+ v6src_dict1 = {"Enable": False, "UseDHCPForDNSServerNames": False,
+ "StaticGateway": "192.168.11.2",
+ "StaticIPAddress": "192.168.11.20", "StaticSubnetMask": "255.255.255.0",
+ "StaticPreferredDNSServer": "192.168.11.3", "EnableAutoConfiguration": False}
+ v6new_dict1 = {"Enable": True, "EnableAutoConfiguration": True, "StaticGateway": "192.168.11.1",
+ "UseDHCPForDNSServerNames": True, "StaticPreferredDNSServer": "2607:f2b1:f081:9:1c8c:f1c7:47e:f122",
+ "StaticAlternateDNSServer": "2607:f2b1:f081:9:1c8c:f1c7:47e:f121"}
+
+ @pytest.mark.parametrize("addr_param", [{"src_dict": v6src_dict1, "new_dict": v6new_dict1, 'diff': 3},
+ {"src_dict": v6src_dict1, "new_dict": {}, 'diff': 0}])
+ def test_update_ipv6_payload(self, addr_param):
+ ret_val = self.module.update_ipv6_payload(addr_param["src_dict"], addr_param["new_dict"])
+ assert ret_val == addr_param['diff']
+
+ dns_src = {"RegisterWithDNS": False, "DnsName": "openmanage-enterprise",
+ "UseDHCPForDNSDomainName": False, "DnsDomainName": "localdomain"}
+ dns_new = {"RegisterWithDNS": True, "DnsName": "openmanage-enterprise1",
+ "UseDHCPForDNSDomainName": True, "DnsDomainName": "localdomain1"}
+
+ @pytest.mark.parametrize("addr_param", [{"src_dict": dns_src, "new_dict": dns_new, 'diff': 3},
+ {"src_dict": dns_src, "new_dict": {}, 'diff': 0},
+ {"src_dict": dns_src, "new_dict": {"RegisterWithDNS": None,
+ "UseDHCPForDNSDomainName": None},
+ 'diff': 0}])
+ def test_update_dns_payload(self, addr_param):
+ ret_val = self.module.update_dns_payload(addr_param["src_dict"], addr_param["new_dict"])
+ assert ret_val == addr_param['diff']
+
+ vlan_src = {"EnableVLAN": False, "Id": 0}
+ vlan_new = {"EnableVLAN": True, "Id": 1}
+
+ @pytest.mark.parametrize("addr_param", [{"src_dict": vlan_src, "new_dict": vlan_new, 'diff': 2},
+ {"src_dict": vlan_src, "new_dict": {}, 'diff': 0},
+ {"src_dict": vlan_src, "new_dict": {"EnableVLAN": None}, 'diff': 0}])
+ def test_update_vlan_payload(self, addr_param):
+ ret_val = self.module.update_vlan_payload(addr_param["src_dict"], addr_param["new_dict"])
+ assert ret_val == addr_param['diff']
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_application_network_address_main_success_failure_case1(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_application_network_address,
+ ome_response_mock):
+ ome_default_args.update({"dns_configuration": {"dns_domain_name": "localdomain"},
+ "ipv4_configuration": {"enable": True, "enable_dhcp": True},
+ "ipv6_configuration": {"enable": False, "enable_auto_configuration": True}})
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'ome_application_network_address.validate_input',
+ side_effect=exc_type("url open error"))
+ ome_default_args.update({"dns_configuration": {"dns_domain_name": "localdomain"},
+ "ipv4_configuration": {"enable": True, "enable_dhcp": True},
+ "ipv6_configuration": {"enable": False, "enable_auto_configuration": True}})
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'ome_application_network_address.validate_input',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'ome_application_network_address.validate_input',
+ side_effect=exc_type('http://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'network_configuration' not in result
+ assert 'msg' in result
+
+ def test_get_network_config_data_case_01(self, ome_connection_mock_for_application_network_address,
+ ome_response_mock):
+ param = {}
+ ome_response_mock.json_data = {"value": [{"PrimaryInterface": "val1"}]}
+ f_module = self.get_module_mock(params=param)
+ nt_adp, method, POST_IP_CONFIG = self.module.get_network_config_data(
+ ome_connection_mock_for_application_network_address, f_module)
+ assert nt_adp == {'PrimaryInterface': 'val1'}
+ assert method == "POST"
+ assert POST_IP_CONFIG == "ApplicationService/Actions/Network.ConfigureNetworkAdapter"
+
+ def test_get_network_config_data_case_02(self, ome_connection_mock_for_application_network_address,
+ ome_response_mock):
+ param = {"interface_name": "val1"}
+ ome_response_mock.json_data = {"value": [{"InterfaceName": "val1"}]}
+ f_module = self.get_module_mock(params=param)
+ nt_adp, method, POST_IP_CONFIG = self.module.get_network_config_data(
+ ome_connection_mock_for_application_network_address, f_module)
+ assert nt_adp == {'InterfaceName': 'val1'}
+ assert method == "POST"
+ assert POST_IP_CONFIG == "ApplicationService/Actions/Network.ConfigureNetworkAdapter"
+
+ def test_get_network_config_data_case_03(self, ome_connection_mock_for_application_network_address,
+ ome_response_mock):
+ param = {"interface_name": "interface_name"}
+ ome_response_mock.json_data = {"value": [{"InterfaceName": "val2", "PrimaryInterface": "val3"}]}
+ f_module = self.get_module_mock(params=param)
+ nt_adp, method, POST_IP_CONFIG = self.module.get_network_config_data(
+ ome_connection_mock_for_application_network_address, f_module)
+ assert nt_adp == "val3"
+ assert method == "POST"
+ assert POST_IP_CONFIG == "ApplicationService/Actions/Network.ConfigureNetworkAdapter"
+
+ def test_get_network_config_data_case_04(self, ome_connection_mock_for_application_network_address,
+ ome_response_mock):
+ param = {}
+ ome_response_mock.json_data = {"value": []}
+ f_module = self.get_module_mock(params=param)
+ nt_adp, method, POST_IP_CONFIG = self.module.get_network_config_data(
+ ome_connection_mock_for_application_network_address, f_module)
+ assert nt_adp is None
+ assert method == "POST"
+ assert POST_IP_CONFIG == "ApplicationService/Actions/Network.ConfigureNetworkAdapter"
+
+ def test_get_network_config_data_exception_case_01(self, ome_connection_mock_for_application_network_address,
+ ome_response_mock):
+ param = {"interface_name": "interface_name_val"}
+ ome_response_mock.json_data = {"value": []}
+ f_module = self.get_module_mock(params=param)
+ msg = "The 'interface_name' value provided interface_name_val is invalid"
+ with pytest.raises(Exception) as exc:
+ self.module.get_network_config_data(ome_connection_mock_for_application_network_address, f_module)
+ assert exc.value.args[0] == msg
+
+ def test_get_network_config_data_exception_case_02(self, ome_connection_mock_for_application_network_address):
+ param = {}
+ msg = "exception message"
+ ome_connection_mock_for_application_network_address.invoke_request.side_effect = Exception("exception message")
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception, match=msg):
+ self.module.get_network_config_data(
+ ome_connection_mock_for_application_network_address, f_module)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_proxy.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_proxy.py
new file mode 100644
index 00000000..f4d32fcd
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_proxy.py
@@ -0,0 +1,297 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.0.0
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+
+import pytest
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ssl import SSLError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_network_proxy
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+CHECK_MODE_CHANGE_FOUND_MSG = "Changes found to be applied."
+CHECK_MODE_CHANGE_NOT_FOUND_MSG = "No Changes found to be applied."
+
+
+@pytest.fixture
+def ome_connection_mock_for_application_network_proxy(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_application_network_proxy.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ ome_connection_mock_obj.get_all_report_details.return_value = {"report_list": []}
+ return ome_connection_mock_obj
+
+
+class TestOmeApplicationNetworkProxy(FakeAnsibleModule):
+ module = ome_application_network_proxy
+
+ sub_param1 = {"enable_proxy": True, "ip_address": "255.0.0.0", "proxy_port": 443, "proxy_username": "username",
+ "proxy_password": "password",
+ "enable_authentication": True}
+ sub_param2 = {"enable_proxy": False}
+
+ @pytest.mark.parametrize("sub_param", [sub_param1, sub_param2])
+ def test_ome_application_network_proxy_main_success_case_01(self, mocker, ome_default_args, sub_param,
+ ome_connection_mock_for_application_network_proxy,
+ ome_response_mock):
+ ome_default_args.update(sub_param)
+ mocker.patch(MODULE_PATH + "ome_application_network_proxy.get_payload", return_value={"key": "val"})
+ mocker.patch(MODULE_PATH + "ome_application_network_proxy.get_updated_payload", return_value={"key": "val"})
+ ome_response_mock.json_data = {"EnableProxy": True, "IpAddress": "255.0.0.0", "PortNumber": 443,
+ "Username": "username", "Password": "password", "EnableAuthentication": True}
+ result = self.execute_module(ome_default_args)
+ assert result['changed'] is True
+ assert "msg" in result
+ assert "proxy_configuration" in result and result["proxy_configuration"] == {"EnableProxy": True,
+ "IpAddress": "255.0.0.0",
+ "PortNumber": 443,
+ "Username": "username",
+ "Password": "password",
+ "EnableAuthentication": True}
+ assert result["msg"] == "Successfully updated network proxy configuration."
+
+ sub_param1 = {"param": {"enable_proxy": True, "ip_address": "255.0.0.0"},
+ "msg": 'enable_proxy is True but all of the following are missing: proxy_port'}
+ sub_param2 = {"param": {"enable_proxy": True, "proxy_port": 443},
+ "msg": 'enable_proxy is True but all of the following are missing: ip_address'}
+ sub_param3 = {"param": {"enable_proxy": True},
+ "msg": 'enable_proxy is True but all of the following are missing: ip_address, proxy_port'}
+ sub_param4 = {"param": {}, "msg": 'missing required arguments: enable_proxy'}
+
+ @pytest.mark.parametrize("param", [sub_param1, sub_param2, sub_param3, sub_param4])
+ def test_ome_application_network_proxy_main_failure_case_01(self, mocker, ome_default_args, param,
+ ome_connection_mock_for_application_network_proxy,
+ ome_response_mock):
+ sub_param = param["param"]
+ msg = param["msg"]
+ ome_default_args.update(sub_param)
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == msg
+ assert "proxy_configuration" not in result
+ assert result["failed"] is True
+
+ sub_param1 = {
+ "param": {"enable_proxy": True, "proxy_port": 443, "ip_address": "255.0.0.0", "enable_authentication": True,
+ "proxy_username": "255.0.0.0"},
+ "msg": 'enable_authentication is True but all of the following are missing: proxy_password'}
+ sub_param2 = {
+ "param": {"enable_proxy": True, "proxy_port": 443, "ip_address": "255.0.0.0", "enable_authentication": True,
+ "proxy_password": 443},
+ "msg": 'enable_authentication is True but all of the following are missing: proxy_username'}
+ sub_param3 = {
+ "param": {"enable_proxy": True, "proxy_port": 443, "ip_address": "255.0.0.0", "enable_authentication": True},
+ "msg": 'enable_authentication is True but all of the following are missing: proxy_username, proxy_password'}
+
+ @pytest.mark.parametrize("param", [sub_param1, sub_param2, sub_param3])
+ def test_ome_application_network_proxy_main_failure_case_02(self, mocker, ome_default_args, param,
+ ome_connection_mock_for_application_network_proxy,
+ ome_response_mock):
+ sub_param = param["param"]
+ msg = param["msg"]
+ ome_default_args.update(sub_param)
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == msg
+ assert "proxy_configuration" not in result
+ assert result["failed"] is True
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_application_network_proxy_main_success_failure_case3(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_application_network_proxy,
+ ome_response_mock):
+ ome_default_args.update({"enable_proxy": False})
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'ome_application_network_proxy.get_payload',
+ side_effect=exc_type("TEST"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'ome_application_network_proxy.get_payload',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'ome_application_network_proxy.get_payload',
+ side_effect=exc_type('http://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'proxy_configuration' not in result
+ assert 'msg' in result
+
+ def test_remove_unwanted_keys(self, ome_default_args):
+ removable_keys = list(ome_default_args.keys())
+ new_param = {
+ "ip_address": "IpAddress",
+ "proxy_port": "PortNumber",
+ "enable_proxy": "EnableProxy",
+ "proxy_username": "Username",
+ "proxy_password": "Password",
+ "enable_authentication": "EnableAuthentication"
+ }
+ ome_default_args.update(new_param)
+ self.module.remove_unwanted_keys(removable_keys, ome_default_args)
+ assert len(set(new_param.keys()) - set(ome_default_args.keys())) == 0
+
+ def test_remove_unwanted_keys_case2(self):
+ """when key not exists should not throw error"""
+ current_setting = {"@odata.context": "context", "@odata.type": "data_type", "@odata.id": "@odata.id"}
+ removable_keys = ["@odata.context", "@odata.type", "@odata.id", "Password"]
+ self.module.remove_unwanted_keys(removable_keys, current_setting)
+ assert len(current_setting) == 0
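+
+ # Illustrative only: a minimal sketch of the contract the two remove_unwanted_keys tests
+ # above assert (keys are dropped when present and silently skipped when absent). This is
+ # an assumption, not the module's actual implementation.
+ @staticmethod
+ def _example_remove_unwanted_keys(removable_keys, payload):
+ for key in removable_keys:
+ payload.pop(key, None)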
+
+ def test_get_payload(self, ome_default_args):
+ new_param = {
+ "ip_address": "192.168.0.2",
+ "proxy_port": 443,
+ "enable_proxy": True,
+ "proxy_username": "username",
+ "proxy_password": "password",
+ "enable_authentication": False,
+ "port": 443
+ }
+ ome_default_args.update(new_param)
+ f_module = self.get_module_mock(params=ome_default_args)
+ payload = self.module.get_payload(f_module)
+ assert ome_default_args == {"ip_address": "192.168.0.2",
+ "proxy_port": 443,
+ "enable_proxy": True,
+ "proxy_username": "username",
+ "proxy_password": "password",
+ "enable_authentication": False,
+ "hostname": "192.168.0.1",
+ "username": "username",
+ "password": "password",
+ "port": 443,
+ "ca_path": "/path/ca_bundle"}
+ assert payload == {"EnableProxy": True, "IpAddress": "192.168.0.2", "PortNumber": 443, "Username": "username",
+ "Password": "password", "EnableAuthentication": False}
+
+ def test_get_updated_payload_success_case(self, mocker, ome_default_args, ome_connection_mock_for_application_network_proxy,
+ ome_response_mock):
+ current_setting = {"@odata.context": "/api/$metadata#Network.ProxyConfiguration",
+ "@odata.type": "#Network.ProxyConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/ProxyConfiguration", "IpAddress": "255.0.0.0",
+ "PortNumber": 443, "EnableAuthentication": False, "EnableProxy": True,
+ "Username": "username1", "Password": "password1"}
+ payload = {"EnableAuthentication": True, "IpAddress": "192.168.0.1", "PortNumber": 443, 'EnableProxy': True,
+ 'Username': 'username2', "Password": "password2"}
+ f_module = self.get_module_mock(params=ome_default_args)
+ ome_response_mock.json_data = current_setting
+ mocker.patch(MODULE_PATH + "ome_application_network_proxy.validate_check_mode_for_network_proxy",
+ return_value=None)
+ setting = self.module.get_updated_payload(ome_connection_mock_for_application_network_proxy, f_module, payload)
+ assert setting == payload
+
+ def test_get_updated_payload_enable_auth_disable_success_case(self, mocker, ome_default_args,
+ ome_connection_mock_for_application_network_proxy,
+ ome_response_mock):
+ """when EnableAuthentication is False setting will not have Password and UserName even if its passed"""
+ ome_default_args.update(
+ {"enable_authentication": False, "proxy_username": 'username2', "proxy_password": "password2"})
+ current_setting = {"@odata.context": "/api/$metadata#Network.ProxyConfiguration",
+ "@odata.type": "#Network.ProxyConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/ProxyConfiguration", "IpAddress": "255.0.0.0",
+ "PortNumber": 443, "EnableAuthentication": True, "EnableProxy": True,
+ "Username": "username1", "Password": "password1"}
+ payload = {"EnableAuthentication": False, "IpAddress": "192.168.0.1", "PortNumber": 443, 'EnableProxy': True,
+ 'Username': 'username2', "Password": "password2"}
+ f_module = self.get_module_mock(params=ome_default_args)
+ ome_response_mock.json_data = current_setting
+ mocker.patch(MODULE_PATH + "ome_application_network_proxy.validate_check_mode_for_network_proxy",
+ return_value=None)
+ setting = self.module.get_updated_payload(ome_connection_mock_for_application_network_proxy, f_module, payload)
+ assert setting == {"EnableAuthentication": False, "IpAddress": "192.168.0.1", "PortNumber": 443,
+ 'EnableProxy': True}
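+
+ # Illustrative only: a sketch of the merge behaviour the two get_updated_payload success
+ # tests above assert ('@odata' keys dropped, payload values overriding the current
+ # setting, and Username/Password stripped when EnableAuthentication is False). It is an
+ # assumption for readers, not the module's actual implementation.
+ @staticmethod
+ def _example_merge_proxy_setting(current_setting, payload):
+ merged = {key: value for key, value in current_setting.items()
+ if not key.startswith("@odata")}
+ merged.update(payload)
+ if not merged.get("EnableAuthentication"):
+ merged.pop("Username", None)
+ merged.pop("Password", None)
+ return merged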
+
+ def test_get_updated_payload_when_same_setting_failure_case1(self, mocker, ome_default_args,
+ ome_connection_mock_for_application_network_proxy,
+ ome_response_mock):
+ current_setting = {"@odata.context": "/api/$metadata#Network.ProxyConfiguration",
+ "@odata.type": "#Network.ProxyConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/ProxyConfiguration", "IpAddress": "255.0.0.0",
+ "PortNumber": 443, "EnableAuthentication": False, "EnableProxy": True,
+ "Username": "username", "Password": "password"}
+ payload = {"IpAddress": "255.0.0.0", "PortNumber": 443, "EnableAuthentication": False, "EnableProxy": True,
+ "Username": "username", "Password": "password"}
+ f_module = self.get_module_mock(params=ome_default_args)
+ ome_response_mock.json_data = current_setting
+ error_message = "No changes made to proxy configuration as entered values are the same as current " \
+ "configuration values."
+ mocker.patch(MODULE_PATH + "ome_application_network_proxy.validate_check_mode_for_network_proxy",
+ return_value=None)
+ with pytest.raises(Exception, match=error_message) as err:
+ self.module.get_updated_payload(ome_connection_mock_for_application_network_proxy, f_module, payload)
+
+ def test_get_updated_payload_when_same_setting_failure_case2(self, mocker, ome_default_args,
+ ome_connection_mock_for_application_network_proxy,
+ ome_response_mock):
+ """Password are ignored for difference check in payload"""
+ current_setting = {"@odata.context": "/api/$metadata#Network.ProxyConfiguration",
+ "@odata.type": "#Network.ProxyConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/ProxyConfiguration", "IpAddress": "255.0.0.0",
+ "PortNumber": 443, "EnableAuthentication": False, "EnableProxy": True,
+ "Username": "username", "Password": "password1"}
+ payload = {"IpAddress": "255.0.0.0", "PortNumber": 443, "EnableAuthentication": False, "EnableProxy": True,
+ "Username": "username", "Password": "password2"}
+ f_module = self.get_module_mock(params=ome_default_args)
+ ome_response_mock.json_data = current_setting
+ error_message = "No changes made to proxy configuration as entered values are the same as current " \
+ "configuration values."
+ mocker.patch(MODULE_PATH + "ome_application_network_proxy.validate_check_mode_for_network_proxy",
+ return_value=None)
+ with pytest.raises(Exception, match=error_message) as err:
+ self.module.get_updated_payload(ome_connection_mock_for_application_network_proxy, f_module, payload)
+
+ def test_get_updated_payload_when_no_diff_failure_case(self, mocker, ome_default_args,
+ ome_connection_mock_for_application_network_proxy,
+ ome_response_mock):
+ current_setting = {"@odata.context": "/api/$metadata#Network.ProxyConfiguration",
+ "@odata.type": "#Network.ProxyConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/ProxyConfiguration", "IpAddress": "255.0.0.0",
+ "PortNumber": 443, "EnableAuthentication": False, "EnableProxy": True,
+ "Username": "username", "Password": "password"}
+ payload = {}
+ f_module = self.get_module_mock(params=ome_default_args)
+ ome_response_mock.json_data = current_setting
+ error_message = "Unable to configure the proxy because proxy configuration settings are not provided."
+ mocker.patch(MODULE_PATH + "ome_application_network_proxy.validate_check_mode_for_network_proxy",
+ return_value=None)
+ with pytest.raises(Exception, match=error_message) as err:
+ self.module.get_updated_payload(ome_connection_mock_for_application_network_proxy, f_module, payload)
+
+ def test_validate_check_mode_for_network_proxy_case01(self, ome_default_args):
+ f_module = self.get_module_mock(params={}, check_mode=True)
+ with pytest.raises(Exception, match=CHECK_MODE_CHANGE_FOUND_MSG):
+ self.module.validate_check_mode_for_network_proxy(True, f_module)
+
+ def test_validate_check_mode_for_network_proxy_case02(self, ome_default_args):
+ f_module = self.get_module_mock(params={}, check_mode=True)
+ with pytest.raises(Exception, match=CHECK_MODE_CHANGE_NOT_FOUND_MSG):
+ self.module.validate_check_mode_for_network_proxy(False, f_module)
+
+ def test_validate_check_mode_for_network_proxy_case03(self, ome_default_args):
+ f_module = self.get_module_mock(params={}, check_mode=False)
+ self.module.validate_check_mode_for_network_proxy(True, f_module)
+
+ def test_validate_check_mode_for_network_proxy_case04(self, ome_default_args):
+ f_module = self.get_module_mock(params={}, check_mode=False)
+ self.module.validate_check_mode_for_network_proxy(False, f_module)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_settings.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_settings.py
new file mode 100644
index 00000000..0cd91a7f
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_settings.py
@@ -0,0 +1,381 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 4.4.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+
+import pytest
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ssl import SSLError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_network_settings
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+SUCCESS_MSG = "Successfully updated the session timeout settings."
+NO_CHANGES = "No changes found to be applied."
+CHANGES_FOUND = "Changes found to be applied."
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_application_network_settings.'
+MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.ome.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_ns(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeApplicationNetworkSettings(FakeAnsibleModule):
+ module = ome_application_network_settings
+
+ responseData = {
+ "value": [
+ {
+ "@odata.type": "#SessionService.SessionConfiguration",
+ "SessionType": "GUI",
+ "MaxSessions": 5,
+ "SessionTimeout": 1380000,
+ "MinSessionTimeout": 60000,
+ "MaxSessionTimeout": 86400000,
+ "MinSessionsAllowed": 1,
+ "MaxSessionsAllowed": 100,
+ "MaxSessionsConfigurable": True,
+ "SessionTimeoutConfigurable": True
+ },
+ {
+ "@odata.type": "#SessionService.SessionConfiguration",
+ "SessionType": "API",
+ "MaxSessions": 100,
+ "SessionTimeout": 1380000,
+ "MinSessionTimeout": 60000,
+ "MaxSessionTimeout": 86400000,
+ "MinSessionsAllowed": 1,
+ "MaxSessionsAllowed": 100,
+ "MaxSessionsConfigurable": True,
+ "SessionTimeoutConfigurable": True
+ },
+ {
+ "@odata.type": "#SessionService.SessionConfiguration",
+ "SessionType": "UniversalTimeout",
+ "MaxSessions": 0,
+ "SessionTimeout": 1380000,
+ "MinSessionTimeout": -1,
+ "MaxSessionTimeout": 86400000,
+ "MinSessionsAllowed": 0,
+ "MaxSessionsAllowed": 0,
+ "MaxSessionsConfigurable": False,
+ "SessionTimeoutConfigurable": True
+ }
+ ]
+ }
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "destination_address": "localhost", "port_number": 25, "use_ssl": True,
+ "enable_authentication": True,
+ "credentials": {"username": "username", "password": "password"}
+ },
+ "json_data": responseData
+ }
+ ])
+ def test_fetch_session_inactivity_settings(self, params, ome_connection_mock_for_ns, ome_response_mock):
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params["json_data"]
+ ret_data = self.module.fetch_session_inactivity_settings(ome_connection_mock_for_ns)
+ assert ret_data[0].get("SessionType") == "GUI"
+ assert ret_data[0].get("MaxSessions") == 5
+ assert ret_data[0].get("SessionTimeout") == 1380000
+
+ @pytest.mark.parametrize("params", [
+ {
+ "json_data": responseData.get("value"),
+ "payload": responseData.get("value"),
+ }
+ ])
+ def test_update_session_inactivity_settings(self, params, ome_connection_mock_for_ns, ome_response_mock):
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 201
+ ome_response_mock.json_data = params["json_data"]
+ payload = params["payload"]
+ ret_value = self.module.update_session_inactivity_settings(ome_connection_mock_for_ns, payload)
+ ret_data = ret_value.json_data
+ assert ret_data[0].get("SessionType") == "GUI"
+ assert ret_data[0].get("MaxSessions") == 5
+ assert ret_data[0].get("SessionTimeout") == 1380000
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "session_inactivity_timeout": {
+ "enable_universal_timeout": True,
+ "universal_timeout": 2
+ }
+ },
+ "payload": responseData.get("value")
+ }
+ ])
+ def test_update_payload_ut_enable(self, params, ome_connection_mock_for_ns, ome_response_mock):
+ f_module = self.get_module_mock(params=params['module_args'])
+ payload = params["payload"]
+ ret_data, diff = self.module.update_payload(f_module, payload)
+ assert ret_data[2].get("SessionType") == "UniversalTimeout"
+ assert ret_data[2].get("SessionTimeout") == 120000
+ assert diff == 1
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "session_inactivity_timeout": {
+ "enable_universal_timeout": False,
+ "universal_timeout": 2
+ }
+ },
+ "payload": responseData.get("value")
+ }
+ ])
+ def test_update_payload_ut_disable(self, params, ome_connection_mock_for_ns, ome_response_mock):
+ f_module = self.get_module_mock(params=params['module_args'])
+ payload = params["payload"]
+ ret_data, diff = self.module.update_payload(f_module, payload)
+ assert ret_data[2].get("SessionType") == "UniversalTimeout"
+ assert ret_data[2].get("SessionTimeout") == -1
+ assert diff == 1
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "session_inactivity_timeout": {
+ "universal_timeout": 2
+ }
+ },
+ "payload": responseData.get("value")
+ }
+ ])
+ def test_update_payload_no_change(self, params, ome_connection_mock_for_ns, ome_response_mock):
+ f_module = self.get_module_mock(params=params['module_args'])
+ payload = params["payload"]
+ ret_data, diff = self.module.update_payload(f_module, payload)
+ assert diff == 0
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "session_inactivity_timeout": {
+ "api_timeout": 2
+ }
+ },
+ "payload": responseData.get("value")
+ }
+ ])
+ def test_update_payload_timeout_change(self, params, ome_connection_mock_for_ns, ome_response_mock):
+ f_module = self.get_module_mock(params=params['module_args'])
+ payload = params["payload"]
+ ret_data, diff = self.module.update_payload(f_module, payload)
+ assert ret_data[1].get("SessionTimeout") == 1380000
+ assert diff == 0
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "session_inactivity_timeout": {
+ "api_sessions": 90
+ }
+ },
+ "payload": responseData.get("value")
+ }
+ ])
+ def test_update_payload_max_sessions_change(self, params, ome_connection_mock_for_ns, ome_response_mock):
+ f_module = self.get_module_mock(params=params['module_args'])
+ payload = params["payload"]
+ ret_data, diff = self.module.update_payload(f_module, payload)
+ assert ret_data[1].get("MaxSessions") == 90
+ assert diff == 1
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "session_inactivity_timeout": {
+ "api_timeout": 2,
+ "api_sessions": 90
+ }
+ },
+ "payload": responseData.get("value")
+ }
+ ])
+ def test_update_payload_timeout_and_max_session_change(self, params, ome_connection_mock_for_ns, ome_response_mock):
+ f_module = self.get_module_mock(params=params['module_args'])
+ payload = params["payload"]
+ ret_data, diff = self.module.update_payload(f_module, payload)
+ assert ret_data[1].get("SessionTimeout") == 1380000
+ assert ret_data[1].get("MaxSessions") == 90
+ assert diff == 1
+
+ @pytest.mark.parametrize("params", [
+ {
+ "session_inactivity_timeout": {
+ "api_timeout": 2,
+ "api_sessions": 90
+ },
+ "payload": responseData.get("value")[0]
+ }
+ ])
+ def test_get_value_s1(self, params, ome_connection_mock_for_ns, ome_response_mock):
+ payload = params["payload"]
+ ret_data = self.module.get_value(params.get("session_inactivity_timeout"),
+ payload, "api_timeout", "SessionTimeout")
+ assert ret_data == 120000
+
+ @pytest.mark.parametrize("params", [
+ {
+ "session_inactivity_timeout": {
+ "api_sessions": 90
+ },
+ "payload": responseData.get("value")[0]
+ }
+ ])
+ def test_get_value_s2(self, params, ome_connection_mock_for_ns, ome_response_mock):
+ payload = params["payload"]
+ ret_data = self.module.get_value(params.get("session_inactivity_timeout"),
+ payload, "api_timeout", "SessionTimeout")
+ assert ret_data == 1380000
+
+ @pytest.mark.parametrize("params", [
+ {
+ "session_inactivity_timeout": {
+ "universal_timeout": -1
+ },
+ "payload": responseData.get("value")[2]
+ }
+ ])
+ def test_get_value_s3(self, params, ome_connection_mock_for_ns, ome_response_mock):
+ payload = params["payload"]
+ ret_data = self.module.get_value(params.get("session_inactivity_timeout"),
+ payload, "universal_timeout", "SessionTimeout")
+ assert ret_data == -1
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "session_inactivity_timeout": {
+ "enable_universal_timeout": True,
+ "universal_timeout": 2
+ },
+ },
+ "json_data": responseData.get("value"),
+ "get_json_data": responseData.get("value"),
+ "update_payload": responseData.get("value"),
+ }
+ ])
+ def test_module_success(self, mocker, params, ome_connection_mock_for_ns, ome_response_mock, ome_default_args):
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 201
+ ome_response_mock.json_data = params["json_data"]
+ ome_default_args.update(params['module_args'])
+ update_json_data = params["update_payload"]
+ update_json_data[2]["SessionTimeout"] = 120000
+ mocker.patch(MODULE_PATH + 'fetch_session_inactivity_settings', return_value=params["get_json_data"])
+ mocker.patch(MODULE_PATH + 'update_payload', return_value=[update_json_data, 1])
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == SUCCESS_MSG
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "session_inactivity_timeout": {
+ "enable_universal_timeout": True,
+ "universal_timeout": 2
+ },
+ },
+ "json_data": responseData.get("value"),
+ "get_json_data": responseData.get("value"),
+ "update_payload": responseData.get("value"),
+ }
+ ])
+ def test_module_no_idempotent(self, mocker, params, ome_connection_mock_for_ns, ome_response_mock,
+ ome_default_args):
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 201
+ ome_response_mock.json_data = params["json_data"]
+ ome_default_args.update(params['module_args'])
+ update_json_data = params["update_payload"]
+ mocker.patch(MODULE_PATH + 'fetch_session_inactivity_settings', return_value=params["get_json_data"])
+ mocker.patch(MODULE_PATH + 'update_payload', return_value=[update_json_data, 0])
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == NO_CHANGES
+
+ @pytest.mark.parametrize("params", [
+ {
+ "module_args": {
+ "session_inactivity_timeout": {
+ "enable_universal_timeout": True,
+ "universal_timeout": 2
+ },
+ },
+ "json_data": responseData.get("value"),
+ "get_json_data": responseData.get("value"),
+ "update_payload": responseData.get("value"),
+ }
+ ])
+ def test_module_check_mode(self, mocker, params, ome_connection_mock_for_ns, ome_response_mock, ome_default_args):
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 201
+ ome_response_mock.json_data = params["json_data"]
+ ome_default_args.update(params['module_args'])
+ f_module = self.get_module_mock(params=ome_default_args)
+
+ f_module.check_mode = True
+
+ with pytest.raises(Exception) as err:
+ self.module.process_check_mode(f_module, 0)
+ assert err.value.args[0] == NO_CHANGES
+
+ with pytest.raises(Exception) as err:
+ self.module.process_check_mode(f_module, 1)
+ assert err.value.args[0] == CHANGES_FOUND
+
+ f_module.check_mode = False
+
+ with pytest.raises(Exception) as err:
+ self.module.process_check_mode(f_module, 0)
+ assert err.value.args[0] == NO_CHANGES
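+
+ # Illustrative only: a sketch of the check-mode contract that test_module_check_mode
+ # above asserts, reusing the NO_CHANGES / CHANGES_FOUND messages defined at the top of
+ # this file. The real ome_application_network_settings.process_check_mode may differ.
+ @staticmethod
+ def _example_process_check_mode(module, diff):
+ if module.check_mode:
+ module.exit_json(msg=CHANGES_FOUND if diff else NO_CHANGES)
+ elif not diff:
+ module.exit_json(msg=NO_CHANGES)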
+
+ @pytest.mark.parametrize("exc_type",
+ [HTTPError, URLError])
+ def test_session_inactivity_settings_main_exception_case(self, mocker, exc_type, ome_connection_mock_for_ns,
+ ome_response_mock,
+ ome_default_args):
+ ome_default_args.update({"session_inactivity_timeout": {
+ "enable_universal_timeout": True,
+ "universal_timeout": 2
+ }})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'fetch_session_inactivity_settings', side_effect=exc_type("url open"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'fetch_session_inactivity_settings', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'fetch_session_inactivity_settings',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_time.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_time.py
new file mode 100644
index 00000000..53e32311
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_time.py
@@ -0,0 +1,584 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.0.0
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+
+import pytest
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ssl import SSLError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_network_time
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_application_network_time(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_application_network_time.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ ome_connection_mock_obj.get_all_report_details.return_value = {"report_list": []}
+ return ome_connection_mock_obj
+
+
+class TestOmeApplicationNetworkTime(FakeAnsibleModule):
+ module = ome_application_network_time
+
+ sub_param1 = {"enable_ntp": False, "time_zone": "TZ_ID_3"}
+ sub_param2 = {"enable_ntp": False, "system_time": "2020-03-31 21:35:19"}
+ sub_param3 = {"enable_ntp": False, "time_zone": "TZ_ID_3", "system_time": "2020-03-31 21:35:19"}
+
+ @pytest.mark.parametrize("param1", [sub_param2, sub_param3])
+ def test_ome_application_network_time_main_enable_ntp_false_success_case_01(self, mocker, ome_default_args, param1,
+ ome_connection_mock_for_application_network_time,
+ ome_response_mock):
+ ome_default_args.update(param1)
+ mocker.patch(MODULE_PATH + "ome_application_network_time.validate_input")
+ mocker.patch(MODULE_PATH + "ome_application_network_time.validate_time_zone")
+ mocker.patch(MODULE_PATH + "ome_application_network_time.get_payload", return_value={"key": "val"})
+ mocker.patch(MODULE_PATH + "ome_application_network_time.get_updated_payload", return_value={"key": "val"})
+ time_data = {
+ "EnableNTP": False,
+ "JobId": None,
+ "PrimaryNTPAddress": None,
+ "SecondaryNTPAddress1": None,
+ "SecondaryNTPAddress2": None,
+ "SystemTime": None,
+ "TimeSource": "Local Clock",
+ "TimeZone": "TZ_ID_3",
+ "TimeZoneIdLinux": None,
+ "TimeZoneIdWindows": None,
+ "UtcTime": None
+ }
+ ome_response_mock.json_data = time_data
+ result = self.execute_module(ome_default_args)
+ assert result['changed'] is True
+ assert "msg" in result
+ assert "time_configuration" in result and result["time_configuration"] == time_data
+ assert result["msg"] == "Successfully configured network time."
+
+ @pytest.mark.parametrize("param1", [{"enable_ntp": True, "time_zone": "TZ_ID_66"}])
+ @pytest.mark.parametrize("param2", [{"primary_ntp_address": "192.168.0.2"},
+ {"secondary_ntp_address1": "192.168.0.3"},
+ {"secondary_ntp_address2": "192.168.0.4"},
+ {"primary_ntp_address": "192.168.0.2", "secondary_ntp_address1": "192.168.0.3"},
+ {"primary_ntp_address": "192.168.0.2", "secondary_ntp_address2": "192.168.0.4"},
+ {"primary_ntp_address": "192.168.0.2", "secondary_ntp_address1": "192.168.0.3",
+ "secondary_ntp_address2": "192.168.0.4"}
+ ])
+ def test_ome_application_network_time_main_enable_ntp_true_success_case_01(self, mocker, ome_default_args, param1,
+ param2,
+ ome_connection_mock_for_application_network_time,
+ ome_response_mock):
+ ome_default_args.update(param1)
+ ome_default_args.update(param2)
+ mocker.patch(MODULE_PATH + "ome_application_network_time.validate_input")
+ mocker.patch(MODULE_PATH + "ome_application_network_time.validate_time_zone")
+ mocker.patch(MODULE_PATH + "ome_application_network_time.get_payload", return_value={"key": "val"})
+ mocker.patch(MODULE_PATH + "ome_application_network_time.get_updated_payload", return_value={"key": "val"})
+ time_data = {
+ "EnableNTP": True,
+ "JobId": None,
+ "PrimaryNTPAddress": "192.168.0.2",
+ "SecondaryNTPAddress1": "192.168.0.3",
+ "SecondaryNTPAddress2": "192.168.0.4",
+ "SystemTime": None,
+ "TimeSource": "10.136.112.222",
+ "TimeZone": "TZ_ID_66",
+ "TimeZoneIdLinux": None,
+ "TimeZoneIdWindows": None,
+ "UtcTime": None
+ }
+ ome_response_mock.json_data = time_data
+ result = self.execute_module(ome_default_args)
+ assert result['changed'] is True
+ assert "msg" in result
+ assert "time_configuration" in result and result["time_configuration"] == time_data
+ assert result["msg"] == "Successfully configured network time."
+
+ sub_param1 = {
+ "param": {"enable_ntp": True, "primary_ntp_address": "255.0.0.0", "system_time": "2020-03-31 21:35:19"}, "msg":
+ 'parameters are mutually exclusive: system_time|primary_ntp_address'}
+ sub_param2 = {"param": {}, "msg": 'missing required arguments: enable_ntp'}
+ sub_param3 = {"param": {"enable_ntp": False},
+ "msg": "enable_ntp is False but any of the following are missing: time_zone, system_time"}
+ sub_param4 = {"param": {"enable_ntp": True},
+ "msg": "enable_ntp is True but any of the following are missing:"
+ " time_zone, primary_ntp_address, secondary_ntp_address1, secondary_ntp_address2"}
+ sub_param5 = {
+ "param": {
+ "enable_ntp": False,
+ "primary_ntp_address": "10.136.112.220"
+ },
+ "msg": "enable_ntp is False but any of the following are missing:"
+ " time_zone, system_time"
+ }
+ sub_param6 = {
+ "param": {
+ "enable_ntp": False,
+ "secondary_ntp_address1": "10.136.112.220",
+ "system_time": "2020-03-31 21:35:19"
+ },
+ "msg": "parameters are mutually exclusive: system_time|secondary_ntp_address1"
+ }
+ sub_param7 = {
+ "param": {
+ "enable_ntp": False,
+ "secondary_ntp_address2": "10.136.112.220",
+ "system_time": "2020-03-31 21:35:19"
+ },
+ "msg": "parameters are mutually exclusive: system_time|secondary_ntp_address2"
+ }
+ sub_param8 = {"param": {"enable_ntp": False, "primary_ntp_address": "10.136.112.220",
+ "secondary_ntp_address1": "10.136.112.220", "system_time": "2020-03-31 21:35:19"},
+ "msg": "parameters are mutually exclusive: system_time|primary_ntp_address,"
+ " system_time|secondary_ntp_address1"}
+ sub_param9 = {
+ "param": {"enable_ntp": False, "system_time": "2020-03-31 21:35:19", "primary_ntp_address": "10.136.112.220",
+ "secondary_ntp_address2": "10.136.112.220"},
+ "msg": "parameters are mutually exclusive: system_time|primary_ntp_address, system_time|secondary_ntp_address2"}
+ sub_param10 = {
+ "param": {"enable_ntp": False, "system_time": "2020-03-31 21:35:19", "primary_ntp_address": "10.136.112.220",
+ "secondary_ntp_address2": "10.136.112.220", "secondary_ntp_address1": "10.136.112.220"},
+ "msg": "parameters are mutually exclusive: system_time|primary_ntp_address,"
+ " system_time|secondary_ntp_address1, system_time|secondary_ntp_address2"}
+ sub_param11 = {
+ "param": {"enable_ntp": False, "primary_ntp_address": "255.0.0.0", "system_time": "2020-03-31 21:35:19"},
+ "msg": 'parameters are mutually exclusive: system_time|primary_ntp_address'}
+
+ @pytest.mark.parametrize("param",
+ [sub_param1, sub_param2, sub_param3, sub_param4, sub_param5, sub_param6, sub_param7,
+ sub_param8,
+ sub_param9, sub_param10, sub_param11])
+ def test_ome_application_network_time_main_failure_case_01(self, mocker, ome_default_args, param,
+ ome_connection_mock_for_application_network_time,
+ ome_response_mock):
+ sub_param = param["param"]
+ msg = param["msg"]
+ ome_default_args.update(sub_param)
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == msg
+ assert "time_configuration" not in result
+ assert result["failed"] is True
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_application_network_time_main_success_exception_case3(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_application_network_time,
+ ome_response_mock):
+ mocker.patch(MODULE_PATH + "ome_application_network_time.validate_time_zone")
+ ome_default_args.update({"enable_ntp": False, "system_time": "2020-03-31 21:35:18"})
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'ome_application_network_time.get_payload', side_effect=URLError('TESTS'))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ assert 'TESTS' in result['msg']
+ assert result['changed'] is False
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'ome_application_network_time.get_payload',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'ome_application_network_time.get_payload',
+ side_effect=exc_type('http://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'time_configuration' not in result
+ assert 'msg' in result
+
+ def test_remove_unwanted_keys_default_keys_time(self, ome_default_args):
+ removable_keys = list(ome_default_args.keys())
+ new_param = {
+ "enable_ntp": True,
+ "time_zone": "TimeZone",
+ "primary_ntp_address": "192.168.0.2",
+ "secondary_ntp_address1": "192.168.0.3",
+ "secondary_ntp_address2": "192.168.0.4"
+ }
+ ome_default_args.update(new_param)
+ self.module.remove_unwanted_keys(removable_keys, ome_default_args)
+ assert len(set(new_param.keys()) - set(ome_default_args.keys())) == 0
+
+ def test_remove_unwanted_keys_unwanted_keys_time(self):
+ """when key not exists should not throw error"""
+ current_setting = {"@odata.context": "/api/$metadata#Network.TimeConfiguration",
+ "@odata.type": "#Network.TimeConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/TimeConfiguration", "TimeZone": "TZ_ID_1",
+ "TimeZoneIdLinux": "Etc/GMT+12", "TimeZoneIdWindows": "Dateline Standard Time",
+ "EnableNTP": False, "PrimaryNTPAddress": None, "SecondaryNTPAddress1": None,
+ "SecondaryNTPAddress2": None, "SystemTime": "2020-03-31 21:37:08.897",
+ "TimeSource": "Local Clock", "UtcTime": "2020-04-01 09:37:08.897"}
+ removable_keys = ["@odata.context", "@odata.type", "@odata.id", "TimeZoneIdLinux", "TimeZoneIdWindows",
+ "TimeSource", "UtcTime"]
+ self.module.remove_unwanted_keys(removable_keys, current_setting)
+ assert current_setting == {"TimeZone": "TZ_ID_1", "EnableNTP": False, "PrimaryNTPAddress": None,
+ "SecondaryNTPAddress1": None, "SecondaryNTPAddress2": None,
+ "SystemTime": "2020-03-31 21:37:08.897"}
+
+ def test_get_payload_time_case1(self, ome_default_args):
+ new_param = {
+ "enable_ntp": False,
+ "primary_ntp_address": None,
+ "secondary_ntp_address1": None,
+ "secondary_ntp_address2": None,
+ "system_time": "2020-03-31 21:35:19",
+ "time_zone": "TZ_ID_1",
+ }
+ ome_default_args.update(new_param)
+ f_module = self.get_module_mock(params=ome_default_args)
+ payload = self.module.get_payload(f_module)
+ assert f_module.params == ome_default_args
+ assert payload == {"EnableNTP": False, "TimeZone": "TZ_ID_1", "SystemTime": "2020-03-31 21:35:19"}
+
+ def test_get_payload_time_case2(self, ome_default_args):
+ new_param = {
+ "enable_ntp": True,
+ "primary_ntp_address": "10.136.112.220",
+ "secondary_ntp_address1": "10.136.112.221",
+ "secondary_ntp_address2": "10.136.112.222",
+ "system_time": None,
+ "time_zone": "TZ_ID_66"
+ }
+ ome_default_args.update(new_param)
+ f_module = self.get_module_mock(params=ome_default_args)
+ payload = self.module.get_payload(f_module)
+ assert ome_default_args == {
+ "enable_ntp": True,
+ "primary_ntp_address": "10.136.112.220",
+ "secondary_ntp_address1": "10.136.112.221",
+ "secondary_ntp_address2": "10.136.112.222",
+ "system_time": None,
+ "time_zone": "TZ_ID_66",
+ "hostname": "192.168.0.1",
+ "username": "username",
+ "password": "password",
+ "ca_path": "/path/ca_bundle"}
+ assert payload == {"EnableNTP": True, "TimeZone": "TZ_ID_66", "PrimaryNTPAddress": "10.136.112.220",
+ "SecondaryNTPAddress1": "10.136.112.221",
+ "SecondaryNTPAddress2": "10.136.112.222"
+ }
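+
+ # Illustrative only: a sketch of the mapping that test_get_payload_time_case1/case2 above
+ # assert, where module arguments are translated to API keys and None values (plus the
+ # connection arguments) are dropped. This is an assumption, not the module's actual code.
+ @staticmethod
+ def _example_get_time_payload(params):
+ key_map = {"enable_ntp": "EnableNTP", "time_zone": "TimeZone",
+ "primary_ntp_address": "PrimaryNTPAddress",
+ "secondary_ntp_address1": "SecondaryNTPAddress1",
+ "secondary_ntp_address2": "SecondaryNTPAddress2",
+ "system_time": "SystemTime"}
+ return {api_key: params[arg] for arg, api_key in key_map.items()
+ if params.get(arg) is not None}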
+
+ def test_get_updated_payload_success_case(self, ome_default_args, ome_connection_mock_for_application_network_time,
+ ome_response_mock):
+ current_setting = {"@odata.context": "/api/$metadata#Network.TimeConfiguration",
+ "@odata.type": "#Network.TimeConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/TimeConfiguration", "TimeZone": "TZ_ID_02",
+ "TimeZoneIdLinux": "Asia/Colombo", "TimeZoneIdWindows": "Sri Lanka Standard Time",
+ "EnableNTP": True, "PrimaryNTPAddress": "10.136.112.220",
+ "SecondaryNTPAddress1": "10.136.112.221", "SecondaryNTPAddress2": "10.136.112.222",
+ "SystemTime": "2020-04-01 15:39:23.825", "TimeSource": "10.136.112.222",
+ "UtcTime": "2020-04-01 10:09:23.825"}
+ payload = {"EnableNTP": True, "TimeZone": "TZ_ID_66",
+ "SecondaryNTPAddress1": "10.136.112.02",
+ "SecondaryNTPAddress2": "10.136.112.03"
+ }
+ f_module = self.get_module_mock(params=ome_default_args)
+ ome_response_mock.json_data = current_setting
+ setting = self.module.get_updated_payload(ome_connection_mock_for_application_network_time,
+ f_module, payload)
+ expected_payload = {"EnableNTP": True, "TimeZone": "TZ_ID_66",
+ "SecondaryNTPAddress1": "10.136.112.02",
+ "SecondaryNTPAddress2": "10.136.112.03",
+ "PrimaryNTPAddress": "10.136.112.220", # updated not given key from current_setting
+ "SystemTime": "2020-04-01 15:39:23.825", # system will be ignore from ome
+ }
+ assert setting == expected_payload
+
+ def test_get_updated_payload_check_mode_success_case1(self, ome_default_args,
+ ome_connection_mock_for_application_network_time,
+ ome_response_mock):
+ current_setting = {"@odata.context": "/api/$metadata#Network.TimeConfiguration",
+ "@odata.type": "#Network.TimeConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/TimeConfiguration",
+ "TimeZone": "TZ_ID_02", "TimeZoneIdLinux": "Asia/Colombo",
+ "TimeZoneIdWindows": "Sri Lanka Standard Time",
+ "EnableNTP": True,
+ "PrimaryNTPAddress": "10.136.112.220",
+ "SecondaryNTPAddress1": "10.136.112.221",
+ "SecondaryNTPAddress2": "10.136.112.222",
+ "SystemTime": "2020-04-01 15:39:23.825",
+ "TimeSource": "10.136.112.222", "UtcTime": "2020-04-01 10:09:23.825"}
+ payload = {"EnableNTP": True, "TimeZone": "TZ_ID_02",
+ "PrimaryNTPAddress": "10.136.112.220",
+ "SecondaryNTPAddress1": "10.136.112.221",
+ "SecondaryNTPAddress2": "10.136.112.222"
+ }
+ ome_response_mock.json_data = current_setting
+ check_mode_no_diff_msg = "No changes found to be applied to the time configuration."
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ with pytest.raises(Exception, match=check_mode_no_diff_msg):
+ self.module.get_updated_payload(ome_connection_mock_for_application_network_time,
+ f_module, payload)
+
+ def test_get_updated_payload_check_mode_success_case2(self, ome_default_args,
+ ome_connection_mock_for_application_network_time,
+ ome_response_mock):
+ current_setting = {"@odata.context": "/api/$metadata#Network.TimeConfiguration",
+ "@odata.type": "#Network.TimeConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/TimeConfiguration",
+ "TimeZone": "TZ_ID_02", "TimeZoneIdLinux": "Asia/Colombo",
+ "TimeZoneIdWindows": "Sri Lanka Standard Time",
+ "EnableNTP": True,
+ "PrimaryNTPAddress": "10.136.112.220",
+ "SecondaryNTPAddress1": "10.136.112.221",
+ "SecondaryNTPAddress2": "10.136.112.222",
+ "SystemTime": "2020-04-01 15:39:23.825",
+ "TimeSource": "10.136.112.222", "UtcTime": "2020-04-01 10:09:23.825"}
+ payload = {"EnableNTP": True, "PrimaryNTPAddress": "10.136.112.220"}
+ ome_response_mock.json_data = current_setting
+ check_mode_no_diff_msg = "No changes found to be applied to the time configuration."
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ with pytest.raises(Exception, match=check_mode_no_diff_msg) as err:
+ self.module.get_updated_payload(ome_connection_mock_for_application_network_time,
+ f_module, payload)
+
+ def test_get_updated_payload_check_mode_success_case3(self, ome_default_args,
+ ome_connection_mock_for_application_network_time,
+ ome_response_mock):
+ current_setting = {"@odata.context": "/api/$metadata#Network.TimeConfiguration",
+ "@odata.type": "#Network.TimeConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/TimeConfiguration",
+ "TimeZone": "TZ_ID_02", "TimeZoneIdLinux": "Asia/Colombo",
+ "TimeZoneIdWindows": "Sri Lanka Standard Time",
+ "EnableNTP": True,
+ "PrimaryNTPAddress": "10.136.112.220",
+ "SecondaryNTPAddress1": "10.136.112.221",
+ "SecondaryNTPAddress2": "10.136.112.222",
+ "SystemTime": "2020-04-01 15:39:23.825",
+ "TimeSource": "10.136.112.222", "UtcTime": "2020-04-01 10:09:23.825"}
+ payload = {"EnableNTP": True, "PrimaryNTPAddress": "10.136.112.221"} # change in value
+ ome_response_mock.json_data = current_setting
+ check_mode_no_diff_msg = "Changes found to be applied to the time configuration."
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ with pytest.raises(Exception, match=check_mode_no_diff_msg):
+ self.module.get_updated_payload(ome_connection_mock_for_application_network_time,
+ f_module, payload)
+
+ def test_get_updated_payload_without_check_mode_success_case(self, ome_default_args,
+ ome_connection_mock_for_application_network_time,
+ ome_response_mock):
+        """Without check mode, no exception is raised even when there is no difference."""
+ current_setting = {"@odata.context": "/api/$metadata#Network.TimeConfiguration",
+ "@odata.type": "#Network.TimeConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/TimeConfiguration",
+ "TimeZone": "TZ_ID_02", "TimeZoneIdLinux": " Asia/Colombo",
+ "TimeZoneIdWindows": "Sri Lanka Standard Time",
+ "EnableNTP": True,
+ "PrimaryNTPAddress": "10.136.112.220",
+ "SecondaryNTPAddress1": "10.136.112.221",
+ "SecondaryNTPAddress2": "10.136.112.222",
+ "SystemTime": "2020-04-01 15:39:23.825",
+ "TimeSource": "10.136.112.222", "UtcTime": "2020-04-01 10:09:23.825"}
+ payload = {'EnableNTP': True,
+ 'PrimaryNTPAddress': '10.136.112.220',
+ 'SecondaryNTPAddress1': '10.136.112.221',
+ 'SecondaryNTPAddress2': '10.136.112.222',
+ 'SystemTime': '2020-04-01 15:39:23.826',
+ 'TimeZone': 'TZ_ID_02'}
+ ome_response_mock.json_data = current_setting
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=False)
+ current_setting = self.module.get_updated_payload(ome_connection_mock_for_application_network_time,
+ f_module, payload)
+ assert current_setting == payload
+
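+    # validate_time_zone should query OME for the available time zones only when a time_zone value is supplied.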
+ @pytest.mark.parametrize("time_zone_val", ["", 0, "invalid", "TZ_ID_100001"])
+ def test_validate_time_zone_failure_case01(self, ome_default_args, time_zone_val, ome_response_mock,
+ ome_connection_mock_for_application_network_time):
+ param = {"time_zone": time_zone_val}
+ ome_default_args.update(param)
+ f_module = self.get_module_mock(params=ome_default_args)
+ ome_response_mock.json_data = {"@odata.context": "/api/$metadata#Collection(Network.TimeZone)",
+ "@odata.count": 3,
+ "value": [{"@odata.type": "#Network.TimeZone", "Utcoffsetminutes": 60,
+ "Id": "TZ_ID_38", "Name":
+ "(GMT+01:00) Brussels, Copenhagen, Madrid, Paris"},
+ {"@odata.type": "#Network.TimeZone", "Utcoffsetminutes": 60,
+ "Id": "TZ_ID_39", "Name":
+ "(GMT+01:00) Sarajevo, Skopje, Warsaw, Zagreb"},
+ {"@odata.type": "#Network.TimeZone", "Utcoffsetminutes": 360,
+ "Id": "TZ_ID_70", "Name": "(GMT+06:00) Novosibirsk"}]}
+ msg = "Provide valid time zone.Choices are TZ_ID_38,TZ_ID_39,TZ_ID_70"
+ with pytest.raises(Exception, match=msg):
+ self.module.validate_time_zone(f_module, ome_connection_mock_for_application_network_time)
+
+ def test_validate_time_zone_successcase01(self, ome_default_args, ome_response_mock,
+ ome_connection_mock_for_application_network_time):
+ param = {"time_zone": "TZ_ID_38"}
+ ome_default_args.update(param)
+ f_module = self.get_module_mock(params=ome_default_args)
+ ome_response_mock.json_data = {"@odata.context": "/api/$metadata#Collection(Network.TimeZone)",
+ "@odata.count": 3,
+ "value": [{"@odata.type": "#Network.TimeZone", "Utcoffsetminutes": 60,
+ "Id": "TZ_ID_38",
+ "Name": "(GMT+01:00) Brussels, Copenhagen, Madrid, Paris"},
+ {"@odata.type": "#Network.TimeZone", "Utcoffsetminutes": 60,
+ "Id": "TZ_ID_39",
+ "Name": "(GMT+01:00) Sarajevo, Skopje, Warsaw, Zagreb"},
+ {"@odata.type": "#Network.TimeZone", "Utcoffsetminutes": 360,
+ "Id": "TZ_ID_70", "Name": "(GMT+06:00) Novosibirsk"}]}
+ self.module.validate_time_zone(f_module, ome_connection_mock_for_application_network_time)
+ assert ome_connection_mock_for_application_network_time.invoke_request.called
+
+ def test_validate_time_zone_successcase02(self, ome_default_args, ome_response_mock,
+ ome_connection_mock_for_application_network_time):
+ param = {"enable_ntp": True}
+ ome_default_args.update(param)
+ f_module = self.get_module_mock(params=ome_default_args)
+ self.module.validate_time_zone(f_module, ome_connection_mock_for_application_network_time)
+ assert not ome_connection_mock_for_application_network_time.invoke_request.called
+
+ def test_validate_time_zone_successcase03(self, ome_default_args, ome_response_mock,
+ ome_connection_mock_for_application_network_time):
+ param = {"time_zone": None}
+ ome_default_args.update(param)
+ f_module = self.get_module_mock(params=ome_default_args)
+ self.module.validate_time_zone(f_module, ome_connection_mock_for_application_network_time)
+ assert not ome_connection_mock_for_application_network_time.invoke_request.called
+
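+    # validate_input enforces which options are accepted based on the enable_ntp value.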
+ def test_validate_input_time_enable_true_case_01(self, ome_default_args):
+ params = {"enable_ntp": True, "system_time": "2020-04-01 15:39:23.825"}
+ ome_default_args.update(params)
+ f_module = self.get_module_mock(params=ome_default_args)
+ msg = 'When enable NTP is true,the option system time is not accepted.'
+ with pytest.raises(Exception) as exc:
+ self.module.validate_input(f_module)
+ assert exc.value.args[0] == msg
+
+ @pytest.mark.parametrize("sub_param", [
+ {"primary_ntp_address": "192.168.02.1", "secondary_ntp_address1": "192.168.02.3",
+ "secondary_ntp_address2": "192.168.02.2"},
+ {"secondary_ntp_address1": "192.168.02.1"},
+ {"secondary_ntp_address2": "192.168.02.1"},
+ {"primary_ntp_address": "192.168.02.1", "time_zone": "TZ_01"},
+ {"primary_ntp_address": "192.168.02.1"},
+ {"secondary_ntp_address1": "192.168.02.1", "time_zone": "TZ_01"},
+ ])
+ def test_validate_input_time_enable_false_case_01(self, ome_default_args, sub_param):
+ params = {"enable_ntp": False}
+ params.update(sub_param)
+ ome_default_args.update(params)
+ f_module = self.get_module_mock(params=ome_default_args)
+ msg = "When enable NTP is false,the option(s) primary_ntp_address, secondary_ntp_address1 and secondary_ntp_address2 is not accepted."
+ with pytest.raises(Exception) as exc:
+ self.module.validate_input(f_module)
+ assert exc.value.args[0] == msg
+
+ @pytest.mark.parametrize("sub_param", [{"time_zone": "TZ_01"}, {"primary_ntp_address": "192.168.02.1"},
+ {"secondary_ntp_address1": "192.168.02.1"},
+ {"secondary_ntp_address2": "192.168.02.1"},
+ {"primary_ntp_address": "192.168.02.1", "time_zone": "TZ_01"}, {}
+ ])
+ def test_validate_input_time_enable_true_case_04(self, ome_default_args, sub_param):
+ """
+ exception should not be raised
+ """
+ params = {"enable_ntp": True}
+ params.update(sub_param)
+ ome_default_args.update(params)
+ f_module = self.get_module_mock(params=ome_default_args)
+ self.module.validate_input(f_module)
+
+ @pytest.mark.parametrize("sub_param", [{"time_zone": "TZI_01"}, {"system_time": "2020-04-01 15:39:23.825"},
+ {"time_zone": "TZI_01", "system_time": "2020-04-01 15:39:23.825"}, {}])
+ def test_validate_input_time_enable_false_case_03(self, ome_default_args, sub_param):
+        """Success case: no exception is raised when the accepted options are passed."""
+ params = {"enable_ntp": False}
+ params.update(sub_param)
+ ome_default_args.update(params)
+ f_module = self.get_module_mock(params=ome_default_args)
+ self.module.validate_input(f_module)
+
+ def test_get_updated_payload_non_check_mode_success_case1(self, ome_default_args,
+ ome_connection_mock_for_application_network_time,
+ ome_response_mock):
+ current_setting = {"@odata.context": "/api/$metadata#Network.TimeConfiguration",
+ "@odata.type": "#Network.TimeConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/TimeConfiguration",
+ "TimeZone": "TZ_ID_02", "TimeZoneIdLinux": "Asia/Colombo",
+ "TimeZoneIdWindows": "Sri Lanka Standard Time",
+ "EnableNTP": True,
+ "PrimaryNTPAddress": "10.136.112.220",
+ "SecondaryNTPAddress1": "10.136.112.221",
+ "SecondaryNTPAddress2": "10.136.112.222",
+ "SystemTime": "2020-04-01 15:39:23.825",
+ "TimeSource": "10.136.112.222", "UtcTime": "2020-04-01 10:09:23.825"}
+ payload = {"EnableNTP": True, "TimeZone": "TZ_ID_02",
+ "PrimaryNTPAddress": "10.136.112.220",
+ "SecondaryNTPAddress1": "10.136.112.221",
+ "SecondaryNTPAddress2": "10.136.112.222"
+ }
+ ome_response_mock.json_data = current_setting
+ check_mode_no_diff_msg = "No changes made to the time configuration as the entered values are the same as the current configuration."
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=False)
+ with pytest.raises(Exception, match=check_mode_no_diff_msg):
+ self.module.get_updated_payload(ome_connection_mock_for_application_network_time,
+ f_module, payload)
+
+ def test_get_updated_payload_non_check_mode_success_case2(self, ome_default_args,
+ ome_connection_mock_for_application_network_time,
+ ome_response_mock):
+ current_setting = {"@odata.context": "/api/$metadata#Network.TimeConfiguration",
+ "@odata.type": "#Network.TimeConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/TimeConfiguration",
+ "TimeZone": "TZ_ID_02", "TimeZoneIdLinux": "Asia/Colombo",
+ "TimeZoneIdWindows": "Sri Lanka Standard Time",
+ "EnableNTP": True,
+ "PrimaryNTPAddress": "10.136.112.220",
+ "SecondaryNTPAddress1": "10.136.112.221",
+ "SecondaryNTPAddress2": "10.136.112.222",
+ "SystemTime": "2020-04-01 15:39:23.825",
+ "TimeSource": "10.136.112.222", "UtcTime": "2020-04-01 10:09:23.825"}
+ payload = {"EnableNTP": True, "PrimaryNTPAddress": "10.136.112.220"}
+ ome_response_mock.json_data = current_setting
+ check_mode_no_diff_msg = "No changes made to the time configuration as the entered values are the same as the current configuration."
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=False)
+ with pytest.raises(Exception, match=check_mode_no_diff_msg) as err:
+ self.module.get_updated_payload(ome_connection_mock_for_application_network_time,
+ f_module, payload)
+
+ def test_update_time_config_output(self):
+ backup_setting = {"@odata.context": "/api/$metadata#Network.TimeConfiguration",
+ "@odata.type": "#Network.TimeConfiguration",
+ "@odata.id": "/api/ApplicationService/Network/TimeConfiguration",
+ "TimeZone": "TZ_ID_1",
+ "TimeZoneIdLinux": "Etc/GMT+12",
+ "TimeZoneIdWindows": "Dateline Standard Time",
+ "EnableNTP": False,
+ "PrimaryNTPAddress": None,
+ "SecondaryNTPAddress1": None,
+ "SecondaryNTPAddress2": None,
+ "SystemTime": "2020-03-31 21:37:08.897",
+ "TimeSource": "Local Clock",
+ "UtcTime": "2020-04-01 09:37:08.897"}
+ self.module.update_time_config_output(backup_setting)
+ assert backup_setting == {
+ "EnableNTP": False,
+ "JobId": None,
+ "PrimaryNTPAddress": None,
+ "SecondaryNTPAddress1": None,
+ "SecondaryNTPAddress2": None,
+ "SystemTime": "2020-03-31 21:37:08.897",
+ "TimeSource": "Local Clock",
+ "TimeZone": "TZ_ID_1",
+ "TimeZoneIdLinux": "Etc/GMT+12",
+ "TimeZoneIdWindows": "Dateline Standard Time",
+ "UtcTime": "2020-04-01 09:37:08.897"}
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_webserver.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_webserver.py
new file mode 100644
index 00000000..d6fbc368
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_webserver.py
@@ -0,0 +1,143 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 2.1.3
+# Copyright (C) 2019-2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+
+import pytest
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_network_webserver
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_application_network_webserver(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(
+ MODULE_PATH + 'ome_application_network_webserver.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
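+# Tests for the ome_application_network_webserver module: payload updates, check mode behavior, and error handling.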
+class TestOmeAppNetwork(FakeAnsibleModule):
+ module = ome_application_network_webserver
+
+ sub_param1 = {"webserver_port": 443, "webserver_timeout": 20}
+
+ @pytest.mark.parametrize("sub_param", [sub_param1])
+ def test_ome_application_network_webserver_main_success_case_01(self, mocker, ome_default_args, sub_param,
+ ome_connection_mock_for_application_network_webserver,
+ ome_response_mock):
+ ome_default_args.update(sub_param)
+ resp = {"TimeOut": 25, "PortNumber": 443, "EnableWebServer": True}
+ port_change = 0
+ mocker.patch(MODULE_PATH + "ome_application_network_webserver.get_updated_payload",
+ return_value=(resp, port_change))
+ ome_response_mock.json_data = resp
+ result = self.execute_module(ome_default_args)
+ assert result['changed'] is True
+ assert "msg" in result
+ assert "webserver_configuration" in result and result["webserver_configuration"] == resp
+ assert result["msg"] == "Successfully updated network web server configuration."
+
+ in1 = {"webserver_port": 443, "webserver_timeout": 25}
+ in2 = {"webserver_timeout": 25}
+ out1 = {"TimeOut": 25, "PortNumber": 443, "EnableWebServer": True}
+ out2 = {"TimeOut": 25, "PortNumber": 1443, "EnableWebServer": True}
+
+ @pytest.mark.parametrize("sub_param", [{"in": in1, "out": out1},
+ {"in": in2, "out": out2}])
+ def test_get_updated_payload_success1(self, sub_param, ome_default_args,
+ ome_connection_mock_for_application_network_webserver,
+ ome_response_mock):
+ ome_default_args.update(sub_param["in"])
+ ome_response_mock.json_data = {"TimeOut": 20, "PortNumber": 1443, "EnableWebServer": True,
+ "@odata.context": "$metadata#Network.WebServerConfiguration/$entity",
+ "@odata.id": "/api/ApplicationService/Network/WebServerConfiguration"}
+ f_module = self.get_module_mock(params=ome_default_args)
+ payload, port = self.module.get_updated_payload(ome_connection_mock_for_application_network_webserver, f_module)
+ assert payload == sub_param["out"]
+
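+    # Note: pytest does not collect this case because its name does not start with 'test_'.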
+ def _test_get_updated_payload_when_same_setting_failure_case(self, ome_default_args,
+ ome_connection_mock_for_application_network_webserver,
+ ome_response_mock):
+ new_param = {"webserver_port": 443, "webserver_timeout": 25}
+ ome_default_args.update(new_param)
+ ome_response_mock.json_data = {"TimeOut": 25, "PortNumber": 443, "EnableWebServer": True,
+ "@odata.context": "$metadata#Network.WebServerConfiguration/$entity",
+ "@odata.id": "/api/ApplicationService/Network/WebServerConfiguration"}
+ f_module = self.get_module_mock(params=ome_default_args)
+ error_message = "No changes made to the web server configuration as the entered values are the same as the" \
+ " current configuration."
+ with pytest.raises(Exception, match=error_message) as err:
+ self.module.get_updated_payload(ome_connection_mock_for_application_network_webserver, f_module)
+
+ in1 = {"check_mode": True, "timeout": 25}
+ in2 = {"check_mode": True, "timeout": 30}
+ in3 = {"check_mode": False, "timeout": 25}
+ out1 = "No changes found to be applied to the web server."
+ out2 = "Changes found to be applied to the web server."
+ out3 = "No changes made to the web server configuration as the entered values" \
+ " are the same as the current configuration."
+
+ @pytest.mark.parametrize("sub_param", [{"in": in1, "out": out1},
+ {"in": in2, "out": out2},
+ {"in": in3, "out": out3}])
+ def test_get_updated_payload_check_mode(self, sub_param, ome_default_args,
+ ome_connection_mock_for_application_network_webserver, ome_response_mock):
+ new_param = {"webserver_port": 443, "webserver_timeout": sub_param["in"]["timeout"]}
+ ome_default_args.update(new_param)
+ ome_response_mock.json_data = {"TimeOut": 25, "PortNumber": 443, "EnableWebServer": True,
+ "@odata.context": "$metadata#Network.WebServerConfiguration/$entity",
+ "@odata.id": "/api/ApplicationService/Network/WebServerConfiguration"}
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=sub_param["in"]["check_mode"])
+ error_message = sub_param["out"]
+ with pytest.raises(Exception, match=error_message) as err:
+ self.module.get_updated_payload(ome_connection_mock_for_application_network_webserver, f_module)
+
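+    # Exceptions raised from get_updated_payload should be reported as module failures; URLError marks the host unreachable.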
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_application_network_webserver_main_error_cases(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_application_network_webserver,
+ ome_response_mock):
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ ome_default_args.update({"webserver_port": 443, "webserver_timeout": 25})
+ if exc_type == URLError:
+ mocker.patch(
+ MODULE_PATH + 'ome_application_network_webserver.get_updated_payload',
+ side_effect=exc_type("test"))
+ ome_default_args.update({"webserver_port": 443, "webserver_timeout": 25})
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(
+ MODULE_PATH + 'ome_application_network_webserver.get_updated_payload',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(
+ MODULE_PATH + 'ome_application_network_webserver.get_updated_payload',
+ side_effect=exc_type('http://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'webserver_configuration' not in result
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_security_settings.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_security_settings.py
new file mode 100644
index 00000000..ef945ae6
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_security_settings.py
@@ -0,0 +1,400 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 4.4.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_security_settings
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_application_security_settings.'
+
+SEC_JOB_TRIGGERED = "Successfully triggered the job to apply security settings."
+SEC_JOB_COMPLETE = "Successfully applied the security settings."
+FIPS_TOGGLED = "Successfully {0} the FIPS mode."
+FIPS_CONN_RESET = "The network connection may have changed. Verify the connection and try again."
+NO_CHANGES_MSG = "No changes found to be applied."
+CHANGES_FOUND = "Changes found to be applied."
+
+
+@pytest.fixture
+def ome_connection_mock_for_security_settings(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeSecuritySettings(FakeAnsibleModule):
+ module = ome_application_security_settings
+
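+    # Each params entry pairs the module arguments with the mocked OME response and the expected result message.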
+ @pytest.mark.parametrize("params", [
+ {"module_args": {
+ "job_wait": False, "job_wait_timeout": 120,
+ "login_lockout_policy": {
+ "by_ip_address": False, "by_user_name": False, "lockout_fail_count": 5,
+ "lockout_fail_window": 30, "lockout_penalty_time": 900},
+ "restrict_allowed_ip_range": {
+ "enable_ip_range": False, "ip_range": None},
+ },
+ "json_data": {
+ "JobId": 1234,
+ "SystemConfiguration": {
+ "Comments": ["Export type is Normal,JSON"],
+ "Model": "", "ServiceTag": "",
+ "Components": [
+ {
+ "FQDD": "MM.Embedded.1",
+ "Attributes": [
+ {
+ "Name": "LoginSecurity.1#Id",
+ "Value": "10"
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutFailCount",
+ "Value": 3
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutFailCountTime",
+ "Value": 32
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutPenaltyTime",
+ "Value": 850
+ },
+ {
+ "Name": "LoginSecurity.1#IPRangeAddr",
+ "Value": None
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutByUsernameEnable",
+ "Value": True
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutByIPEnable",
+ "Value": True
+ },
+ {
+ "Name": "LoginSecurity.1#IPRangeEnable",
+ "Value": False
+ }
+ ]
+ }
+ ]
+ }
+ }, "msg": SEC_JOB_TRIGGERED},
+ {"module_args": {
+ "job_wait": False, "job_wait_timeout": 120,
+ "login_lockout_policy": {
+ "by_ip_address": False, "by_user_name": False, "lockout_fail_count": 5,
+ "lockout_fail_window": 30, "lockout_penalty_time": 900},
+ "restrict_allowed_ip_range": {
+ "enable_ip_range": False, "ip_range": None},
+ },
+ "json_data": {
+ "JobId": 1234,
+ "SystemConfiguration": {
+ "Comments": ["Export type is Normal,JSON"],
+ "Model": "", "ServiceTag": "",
+ "Components": [
+ {
+ "FQDD": "MM.Embedded.1",
+ "Attributes": [
+ {
+ "Name": "LoginSecurity.1#Id",
+ "Value": "10"
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutFailCount",
+ "Value": 5
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutFailCountTime",
+ "Value": 30
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutPenaltyTime",
+ "Value": 900
+ },
+ {
+ "Name": "LoginSecurity.1#IPRangeAddr",
+ "Value": None
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutByUsernameEnable",
+ "Value": False
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutByIPEnable",
+ "Value": False
+ },
+ {
+ "Name": "LoginSecurity.1#IPRangeEnable",
+ "Value": False
+ }
+ ]
+ }
+ ]
+ }
+ }, "msg": NO_CHANGES_MSG},
+ {"module_args": {
+ "job_wait": False, "job_wait_timeout": 120,
+ "login_lockout_policy": {
+ "by_ip_address": False, "by_user_name": False, "lockout_fail_count": 5,
+ "lockout_fail_window": 30, "lockout_penalty_time": 900},
+ "restrict_allowed_ip_range": {
+ "enable_ip_range": False, "ip_range": None},
+ }, "check_mode": True,
+ "json_data": {
+ "JobId": 1234,
+ "SystemConfiguration": {
+ "Comments": ["Export type is Normal,JSON"],
+ "Model": "", "ServiceTag": "",
+ "Components": [
+ {
+ "FQDD": "MM.Embedded.1",
+ "Attributes": [
+ {
+ "Name": "LoginSecurity.1#Id",
+ "Value": "10"
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutFailCount",
+ "Value": 3
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutFailCountTime",
+ "Value": 32
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutPenaltyTime",
+ "Value": 850
+ },
+ {
+ "Name": "LoginSecurity.1#IPRangeAddr",
+ "Value": None
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutByUsernameEnable",
+ "Value": True
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutByIPEnable",
+ "Value": True
+ },
+ {
+ "Name": "LoginSecurity.1#IPRangeEnable",
+ "Value": False
+ }
+ ]
+ }
+ ]
+ }
+ }, "msg": CHANGES_FOUND},
+ {"module_args": {
+ "job_wait": True, "job_wait_timeout": 120,
+ "login_lockout_policy": {
+ "by_ip_address": False, "by_user_name": False, "lockout_fail_count": 5,
+ "lockout_fail_window": 30, "lockout_penalty_time": 900},
+ "restrict_allowed_ip_range": {
+ "enable_ip_range": False, "ip_range": None},
+ },
+ "job_failed": False, "job_message": "job_message",
+ "json_data": {
+ "JobId": 1234,
+ "SystemConfiguration": {
+ "Comments": ["Export type is Normal,JSON"],
+ "Model": "", "ServiceTag": "",
+ "Components": [
+ {
+ "FQDD": "MM.Embedded.1",
+ "Attributes": [
+ {
+ "Name": "LoginSecurity.1#Id",
+ "Value": "10"
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutFailCount",
+ "Value": 3
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutFailCountTime",
+ "Value": 32
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutPenaltyTime",
+ "Value": 850
+ },
+ {
+ "Name": "LoginSecurity.1#IPRangeAddr",
+ "Value": None
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutByUsernameEnable",
+ "Value": True
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutByIPEnable",
+ "Value": True
+ },
+ {
+ "Name": "LoginSecurity.1#IPRangeEnable",
+ "Value": False
+ }
+ ]
+ }
+ ]
+ }
+ }, "msg": SEC_JOB_COMPLETE},
+ {"module_args": {
+ "job_wait": True, "job_wait_timeout": 120,
+ "login_lockout_policy": {
+ "by_ip_address": False, "by_user_name": False, "lockout_fail_count": 5,
+ "lockout_fail_window": 30, "lockout_penalty_time": 900},
+ "restrict_allowed_ip_range": {
+ "enable_ip_range": False, "ip_range": None},
+ },
+ "job_failed": True, "job_message": "job_failed",
+ "json_data": {
+ "JobId": 1234,
+ "value": [
+ {
+ "Id": 1234,
+ "StartTime": "2021-01-01 09:54:08.721",
+ "EndTime": "2021-01-01 09:54:09.022",
+ "Key": "This Chassis",
+ "Value": "job_failed_exec"
+ }
+ ],
+ "SystemConfiguration": {
+ "Comments": ["Export type is Normal,JSON"],
+ "Model": "", "ServiceTag": "",
+ "Components": [
+ {
+ "FQDD": "MM.Embedded.1",
+ "Attributes": [
+ {
+ "Name": "LoginSecurity.1#Id",
+ "Value": "10"
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutFailCount",
+ "Value": 3
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutFailCountTime",
+ "Value": 32
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutPenaltyTime",
+ "Value": 850
+ },
+ {
+ "Name": "LoginSecurity.1#IPRangeAddr",
+ "Value": None
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutByUsernameEnable",
+ "Value": True
+ },
+ {
+ "Name": "LoginSecurity.1#LockoutByIPEnable",
+ "Value": True
+ },
+ {
+ "Name": "LoginSecurity.1#IPRangeEnable",
+ "Value": False
+ }
+ ]
+ }
+ ]
+ }
+ }, "msg": "job_failed_exec"},
+ {"module_args": {"fips_mode_enable": True},
+ "json_data": {"FipsMode": "OFF"},
+ "msg": FIPS_TOGGLED.format("enabled")},
+ {"module_args": {"fips_mode_enable": False},
+ "json_data": {"FipsMode": "ON"},
+ "msg": FIPS_TOGGLED.format("disabled")},
+ {"module_args": {"fips_mode_enable": True},
+ "json_data": {"FipsMode": "ON"},
+ "msg": NO_CHANGES_MSG},
+ {"module_args": {"fips_mode_enable": False},
+ "json_data": {"FipsMode": "ON"},
+ "msg": CHANGES_FOUND, "check_mode": True},
+ ])
+ def test_ome_application_security_success(
+ self,
+ params,
+ ome_connection_mock_for_security_settings,
+ ome_response_mock,
+ ome_default_args,
+ mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params.get("json_data")
+ ome_default_args.update(params['module_args'])
+ ome_connection_mock_for_security_settings.job_tracking.return_value = \
+ (params.get('job_failed'), params.get('job_message'))
+ result = self._run_module(
+ ome_default_args, check_mode=params.get(
+ 'check_mode', False))
+ assert result['msg'] == params['msg']
+
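+    # Exceptions raised from login_security_setting should result in a failed module run; URLError marks the host unreachable.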
+ @pytest.mark.parametrize("exc_type",
+ [IOError,
+ ValueError,
+ SSLValidationError,
+ TypeError,
+ ConnectionError,
+ HTTPError,
+ URLError])
+ def test_security_settings_main_exception_case(
+ self,
+ exc_type,
+ mocker,
+ ome_default_args,
+ ome_connection_mock_for_security_settings,
+ ome_response_mock):
+ ome_default_args.update({"restrict_allowed_ip_range": {
+ "enable_ip_range": False
+ }})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(
+ MODULE_PATH + 'login_security_setting',
+ side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(
+ MODULE_PATH + 'login_security_setting',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'login_security_setting',
+ side_effect=exc_type('http://testhost.com',
+ 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_chassis_slots.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_chassis_slots.py
new file mode 100644
index 00000000..0d3504b1
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_chassis_slots.py
@@ -0,0 +1,297 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 3.6.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import pytest
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_chassis_slots
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+DEVICE_REPEATED = "Duplicate device entry found for devices with identifiers {0}."
+INVALID_SLOT_DEVICE = "Unable to rename one or more slots because either the specified device is invalid or slots " \
+ "cannot be configured. The devices for which the slots cannot be renamed are: {0}."
+JOBS_TRIG_FAIL = "Unable to initiate the slot name rename jobs."
+SUCCESS_MSG = "Successfully renamed the slot(s)."
+SUCCESS_REFRESH_MSG = "The rename slot job(s) completed successfully. " \
+ "For changes to reflect, refresh the inventory task manually."
+FAILED_MSG = "Failed to rename {0} of {1} slot names."
+NO_CHANGES_MSG = "No changes found to be applied."
+CHANGES_FOUND = "Changes found to be applied."
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_chassis_slots.'
+MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.ome.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_chassis_slots(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
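+# Tests for the ome_chassis_slots module: slot rename validation, job submission and tracking, and error handling.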
+class TestOmeChassisSlots(FakeAnsibleModule):
+ module = ome_chassis_slots
+
+ @pytest.mark.parametrize("params", [{'mparams': {"device_options": [
+ {"slot_name": "t1",
+ "device_service_tag": "ABCD1234"}]},
+ "invalid_list": set(["ABCD1234"]), "json_data": {
+ "value": [{"Id": 10053, "Identifier": "2H5DNX2", "SlotConfiguration": {"ChassisName": None}},
+ {"Id": 10054, "Type": 1000, "Identifier": "2H7HNX2",
+ "SlotConfiguration": {"DeviceType": "1000", "ChassisId": "10053", "SlotNumber": "1",
+ "SlotName": "my_840c", "SlotType": "2000"}}]},
+ 'message': INVALID_SLOT_DEVICE, "success": True},
+ {'mparams': {"device_options": [{"slot_name": "s1", "device_id": 10054},
+ {"slot_name": "s2",
+ "device_service_tag": "ABCD1234"},
+ {"slot_name": "s1", "device_id": 10052},
+ ]},
+ "invalid_list": set(["ABCD1234"]),
+ "json_data":
+ {"value": [{"Id": 10053, "Identifier": "2H5DNX2",
+ "SlotConfiguration": {"ChassisName": None}},
+ {"Id": 10054, "Type": 1000, "Identifier": "ABCD1234",
+ "SlotConfiguration": {"DeviceType": "1000", "ChassisId": "10053", "SlotNumber": "1",
+ "SlotName": "my_840c", "SlotType": "2000"}}]}, 'message': DEVICE_REPEATED,
+ "success": True}, {
+ 'mparams': {"device_options": [{"slot_name": "s1", "device_id": 10054},
+ {"slot_name": "s2",
+ "device_service_tag": "ABCD1234"},
+ {"slot_name": "s2",
+ "device_service_tag": "ABCD1234"},
+ {"slot_name": "s2",
+ "device_service_tag": "PQRS1234"}, ]},
+ "invalid_list": set(["ABCD1234"]), "json_data": {
+ "value": [{"Id": 10053, "Identifier": "2H5DNX2", "SlotConfiguration": {"ChassisName": None}},
+ {"Id": 10052, "Type": 1000, "Identifier": "PQRS1234",
+ "SlotConfiguration": {"DeviceType": "1000", "ChassisId": "10053", "SlotNumber": "1",
+ "SlotName": "my_840c", "SlotType": "2000"}},
+ {"Id": 10054, "Type": 1000, "Identifier": "ABCD1234",
+ "SlotConfiguration": {"DeviceType": "1000", "ChassisId": "10053", "SlotNumber": "1",
+ "SlotName": "my_840c", "SlotType": "2000"}}]}, 'message': DEVICE_REPEATED,
+ "success": True}, ])
+ def test_get_device_slot_config_errors(self, params, ome_connection_mock_for_chassis_slots, ome_response_mock,
+ ome_default_args, module_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_connection_mock_for_chassis_slots.get_all_items_with_pagination.return_value = params[
+ 'json_data']
+ ome_default_args.update(params['mparams'])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == params['message'].format(
+ ';'.join(set(params.get("invalid_list"))))
+
+ @pytest.mark.parametrize("params", [{"json_data": {'Name': 'j1', 'Id': 24}, "slot_data": {
+ "ABC1234": {"ChassisId": "123", "SlotNumber": "1", "SlotType": "2000"}}, "failed_jobs": {}}])
+ def test_start_slot_name_jobs(
+ self, params, ome_connection_mock_for_chassis_slots, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ # ome_connection_mock_for_chassis_slots.job_submission.return_value = params['json_data']
+ ome_response_mock.json_data = params["json_data"]
+ failed_jobs = self.module.start_slot_name_jobs(
+ ome_connection_mock_for_chassis_slots, params.get('slot_data'))
+ assert failed_jobs == params['failed_jobs']
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [{'Name': 'j1', 'Id': 12, "LastRunStatus": {"Id": 2060, "Name": "Completed"}}]},
+ "slot_data": {"ABC1234": {"new_name": "s1", "SlotNumber": "1", "SlotType": "2000", "JobId": 12}},
+ "failed_jobs": {}}])
+ def test_get_job_states(
+ self, params, ome_connection_mock_for_chassis_slots, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ f_module = self.get_module_mock()
+ ome_response_mock.json_data = params["json_data"]
+ failed_jobs = self.module.get_job_states(f_module, ome_connection_mock_for_chassis_slots,
+ params.get('slot_data'))
+ assert failed_jobs == params['failed_jobs']
+
+ @pytest.mark.parametrize("params", [{'mparams': {"device_options": [{"slot_name": "my_840c", "device_id": 10054},
+ {"slot_name": "my_740c",
+ "device_service_tag": "ABCD1234"}]},
+ "json_data": {"value": [{"Id": 10053, "Identifier": "ABCD1234",
+ "SlotConfiguration": {"DeviceType": "1000",
+ "ChassisId": "10053",
+ "SlotNumber": "1",
+ "SlotName": "my_740c",
+ "SlotType": "2000"}},
+ {"Id": 10054, "Type": 1000, "Identifier": "PQRS1234",
+ "SlotConfiguration": {"DeviceType": "1000",
+ "ChassisId": "10053",
+ "SlotNumber": "1",
+ "SlotName": "my_840c",
+ "SlotType": "2000"}}]},
+ 'message': NO_CHANGES_MSG, "check_mode": True}, {'mparams': {
+ "device_options": [{"slot_name": "my_840", "device_id": 10054},
+ {"slot_name": "my_740",
+ "device_service_tag": "ABCD1234"}]},
+ "json_data": {"value": [{"Id": 10053, "Identifier": "ABCD1234",
+ "SlotConfiguration": {"DeviceType": "1000",
+ "ChassisId": "10053", "SlotNumber": "1", "SlotName": "my_740c",
+ "SlotType": "2000"}},
+ {"Id": 10054, "Type": 1000, "Identifier": "PQRS1234",
+ "SlotConfiguration": {"DeviceType": "1000", "ChassisId": "10053", "SlotNumber": "1",
+ "SlotName": "my_840c", "SlotType": "2000"}}]},
+ 'message': CHANGES_FOUND, "check_mode": True}, ])
+ def test_check_mode_idempotency(
+ self, params, ome_connection_mock_for_chassis_slots, ome_default_args):
+ ome_connection_mock_for_chassis_slots.get_all_items_with_pagination.return_value = params[
+ 'json_data']
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(
+ ome_default_args, check_mode=params.get(
+ 'check_mode', False))
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("params", [{'mparams': {"device_options": [{"slot_name": "t1", "device_id": 10053},
+ {"slot_name": "t1",
+ "device_service_tag": "ABCD1234"}]},
+ "json_data": {"value": [{"Id": 10053, "Identifier": "2H5DNX2",
+ "SlotConfiguration": {"DeviceType": "1000",
+ "ChassisId": "10053",
+ "SlotNumber": "1",
+ "SlotName": "my_840c",
+ "SlotType": "2000"}},
+ {"Id": 10054, "Identifier": "ABCD1234",
+ "SlotConfiguration": {"DeviceType": "1000",
+ "ChassisId": "10053",
+ "SlotNumber": "1",
+ "SlotName": "my_840c",
+ "SlotType": "2000"}}], },
+ 'message': SUCCESS_MSG, "success": True}])
+ def test_ome_chassis_slots_success_case(self, params, ome_connection_mock_for_chassis_slots, ome_response_mock,
+ ome_default_args, mocker):
+ ome_response_mock.success = params.get("success", True)
+ # ome_response_mock.json_data = params['json_data']
+ ome_connection_mock_for_chassis_slots.get_all_items_with_pagination.return_value = params[
+ 'json_data']
+ ome_connection_mock_for_chassis_slots.job_tracking.return_value = (
+ False, "job_track_msg")
+ mocker.patch(
+ MODULE_PATH +
+ 'trigger_refresh_inventory',
+ return_value=[1])
+ mocker.patch(
+ MODULE_PATH +
+ 'start_slot_name_jobs',
+ return_value=params.get(
+ 'start_slot_name_jobs',
+ {}))
+ mocker.patch(
+ MODULE_PATH +
+ 'get_job_states',
+ return_value=params.get(
+ 'get_job_states',
+ {}))
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(
+ ome_default_args, check_mode=params.get(
+ 'check_mode', False))
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("params", [{'mparams': {"slot_options": [{"chassis_service_tag": "ABC1234",
+ "slots": [{"slot_name": "t1", "slot_number": 1},
+ {"slot_name": "s1",
+ "slot_number": 5}]}]},
+ "chassi": {'value': [{"Identifier": "ABC1234", "Id": 1234}]},
+ "bladeslots": {'ABC1234_1': {"SlotNumber": "1", "SlotName": "myslotty"}},
+ "storageslots": {'value': [{"SlotNumber": "5", "SlotName": "stor-slot1"}]},
+ "slot_data": {"ABC1234_1": {"SlotNumber": "1", "SlotName": "myslotty"}}}])
+ def test_slot_number_config(self, params, ome_connection_mock_for_chassis_slots, ome_response_mock,
+ ome_default_args, mocker):
+ mocker.patch(
+ MODULE_PATH + 'get_device_type',
+ return_value=params.get('chassi'))
+ mocker.patch(
+ MODULE_PATH + 'get_slot_data',
+ return_value=params.get('bladeslots'))
+ f_module = self.get_module_mock(params=params["mparams"])
+ slot_data = self.module.slot_number_config(
+ f_module, ome_connection_mock_for_chassis_slots)
+ assert slot_data == params['slot_data']
+
+ @pytest.mark.parametrize("params", [{"slot_options": {"chassis_service_tag": "ABC1234",
+ "slots": [{"slot_name": "t1", "slot_number": 1},
+ {"slot_name": "s1", "slot_number": 5}]},
+ "chass_id": 1234, "chassi": {'value': [{"Identifier": "ABC1234", "Id": 1234}]},
+ "bladeslots": {'value': [{"SlotNumber": "1", "SlotName": "blade-slot1",
+ "Id": 234}]},
+ "storageslots": {'value': [{"ChassisServiceTag": "ABC1234",
+ "SlotConfiguration": {
+ "SlotId": "123", "SlotNumber": "5",
+ "SlotName": "stor-slot1"}}]},
+ "slot_dict_diff": {'ABC1234_5': {'SlotNumber': '5', 'SlotName': 'stor-slot1',
+ 'ChassisId': 1234, 'SlotId': "123",
+ 'ChassisServiceTag': 'ABC1234',
+ 'new_name': 's1'},
+ 'ABC1234_1': {'SlotNumber': '1', 'SlotName': 'blade-slot1',
+ 'ChassisId': 1234, 'SlotId': "234",
+ "Id": 234,
+ 'ChassisServiceTag': 'ABC1234',
+ 'new_name': 't1'}}}])
+ def test_get_slot_data(self, params, ome_connection_mock_for_chassis_slots, ome_response_mock, ome_default_args,
+ mocker):
+ mocker.patch(
+ MODULE_PATH + 'get_device_type',
+ return_value=params.get('storageslots'))
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["bladeslots"]
+ ch_slots = params['slot_options']
+ f_module = self.get_module_mock()
+ slot_dict_diff = self.module.get_slot_data(f_module, ome_connection_mock_for_chassis_slots, ch_slots,
+ params['chass_id'])
+ assert slot_dict_diff == params['slot_dict_diff']
+
+ @pytest.mark.parametrize("params", [{"json_data": {'Name': 'j1', 'Id': 24}, "slot_data": {
+ "ABC1234": {"ChassisId": "123", "SlotNumber": "1", "ChassisServiceTag": "ABC1234"}}, "jobs": [1]}])
+ def test_trigger_refresh_inventory(
+ self, params, ome_connection_mock_for_chassis_slots, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ jobs = self.module.trigger_refresh_inventory(
+ ome_connection_mock_for_chassis_slots, params.get('slot_data'))
+ assert jobs == params['jobs']
+
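+    # Error handling in main is exercised by patching get_device_slot_config to raise each exception type.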
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_groups_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_chassis_slots, ome_response_mock):
+ ome_default_args.update(
+ {"device_options": [{"slot_name": "t1", "device_id": 1234}]})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(
+ MODULE_PATH + 'get_device_slot_config',
+ side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(
+ MODULE_PATH + 'get_device_slot_config',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'get_device_slot_config',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_baseline.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_baseline.py
new file mode 100644
index 00000000..51ff166f
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_baseline.py
@@ -0,0 +1,1195 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 3.2.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_configuration_compliance_baseline
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_configuration_compliance_baseline.'
+INVALID_DEVICES = "{identifier} details are not available."
+TEMPLATE_ID_ERROR_MSG = "Template with ID '{0}' not found."
+TEMPLATE_NAME_ERROR_MSG = "Template '{0}' not found."
+NAMES_ERROR = "Only delete operations accept multiple baseline names. All the other operations accept only a single " \
+ "baseline name."
+BASELINE_CHECK_MODE_CHANGE_MSG = "Baseline '{name}' already exists."
+CHECK_MODE_CHANGES_MSG = "Changes found to be applied."
+CHECK_MODE_NO_CHANGES_MSG = "No changes found to be applied."
+BASELINE_CHECK_MODE_NOCHANGE_MSG = "Baseline '{name}' does not exist."
+CREATE_MSG = "Successfully created the configuration compliance baseline."
+DELETE_MSG = "Successfully deleted the configuration compliance baseline(s)."
+TASK_PROGRESS_MSG = "The initiated task for the configuration compliance baseline is in progress."
+CREATE_FAILURE_PROGRESS_MSG = "The initiated task for the configuration compliance baseline has failed"
+INVALID_IDENTIFIER = "Target with {identifier} {invalid_val} not found."
+IDEMPOTENCY_MSG = "The specified configuration compliance baseline details are the same as the existing settings."
+INVALID_COMPLIANCE_IDENTIFIER = "Unable to complete the operation because the entered target {0} {1}" \
+ " is not associated with the baseline '{2}'."
+INVALID_TIME = "job_wait_timeout {0} is not valid."
+REMEDIATE_MSG = "Successfully completed the remediate operation."
+MODIFY_MSG = "Successfully modified the configuration compliance baseline."
+JOB_FAILURE_PROGRESS_MSG = "The initiated task for the configuration compliance baseline has failed."
+
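+# Mocked OME payloads (device inventory, groups, baselines, and compliance reports) shared across the tests below.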
+device_info = {
+ "value": [
+ {
+ "Id": Constants.device_id1,
+ "Type": 2000,
+ "Identifier": Constants.service_tag1,
+ "DeviceServiceTag": Constants.service_tag1,
+ "ChassisServiceTag": None,
+ "Model": "PowerEdge MX7000",
+ "PowerState": 17,
+ "DeviceCapabilities": [33, 11],
+ "ManagedState": 3000,
+ "Status": 1000,
+ "ConnectionState": True,
+ "SystemId": 2031,
+ "DeviceName": "MX-MOCK"
+ },
+ {
+ "Id": Constants.device_id2,
+ "Type": 2000,
+ "Identifier": Constants.service_tag2,
+ "DeviceServiceTag": Constants.service_tag2,
+ "ChassisServiceTag": None,
+ "Model": "PowerEdge MX7000",
+ "PowerState": 17,
+ "ManagedState": 3000,
+ "Status": 1000,
+ "ConnectionState": True,
+ "SystemId": 2031,
+ "DeviceName": "MX-MOCK"
+ }
+ ]
+}
+
+group_info = {
+ "@odata.count": 2,
+ "value": [
+ {
+ "Id": Constants.device_id1,
+ "Name": "Network Mock",
+ },
+ {
+ "Id": Constants.device_id2,
+ "Name": "OEM Mock",
+ }
+ ]
+}
+
+baseline_info = {
+ "@odata.count": 1,
+ "value": [
+ {
+ "@odata.type": "#TemplateService.Baseline",
+ "@odata.id": "/api/TemplateService/Baselines(30)",
+ "Id": 30,
+ "Name": "baseline5",
+ "Description": None,
+ "TemplateId": 102,
+ "TemplateName": "one",
+ "TemplateType": 2,
+ "TaskId": 26606,
+ "PercentageComplete": "100",
+ "TaskStatus": 2070,
+ "LastRun": "2021-03-02 19:29:31.503",
+ "BaselineTargets": [
+ {
+ "Id": 10074,
+ "Type": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ],
+ "ConfigComplianceSummary": {
+ "ComplianceStatus": "OK",
+ "NumberOfCritical": 0,
+ "NumberOfWarning": 0,
+ "NumberOfNormal": 0,
+ "NumberOfIncomplete": 0
+ },
+ "DeviceConfigComplianceReports@odata.navigationLink": "/api/TemplateService/Baselines(30)/DeviceConfigComplianceReports"
+ }
+ ]
+}
+
+baseline_output = {
+ "Id": 30,
+ "Name": "baseline5",
+ "Description": None,
+ "TemplateId": 102,
+ "TemplateName": "one",
+ "TemplateType": 2,
+ "TaskId": 26606,
+ "PercentageComplete": "100",
+ "TaskStatus": 2070,
+ "LastRun": "2021-03-02 19:29:31.503",
+ "BaselineTargets": [
+ {
+ "Id": 10074,
+ "Type": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ],
+ "ConfigComplianceSummary": {
+ "ComplianceStatus": "OK",
+ "NumberOfCritical": 0,
+ "NumberOfWarning": 0,
+ "NumberOfNormal": 0,
+ "NumberOfIncomplete": 0
+ },
+}
+
+compliance_report = {
+ "@odata.count": 2,
+ "value": [
+ {
+ "@odata.id": "/api/TemplateService/Baselines(30)/DeviceConfigComplianceReports({0})".format(
+ Constants.device_id1),
+ "Id": Constants.device_id1,
+ "DeviceName": "mock_devicename",
+ "Model": "mock_model",
+ "ServiceTag": Constants.service_tag1,
+ "ComplianceStatus": "COMPLIANT",
+ "DeviceType": 1000,
+ "InventoryTime": "2021-03-10 21:39:16.958627",
+ },
+ {
+ "@odata.id": "/api/TemplateService/Baselines(30)/DeviceConfigComplianceReports({0})".format(
+ Constants.device_id2),
+ "Id": Constants.device_id2,
+ "DeviceName": "mock_devicename",
+ "Model": "mock_model",
+ "ServiceTag": Constants.service_tag2,
+ "ComplianceStatus": "NONCOMPLIANT",
+ "DeviceType": 1000,
+ "InventoryTime": "2021-03-10 21:39:16.958627",
+ }
+ ]
+}
+
+
+@pytest.fixture
+def ome_connection_mock_for_compliance(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
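+# Tests for the ome_configuration_compliance_baseline module, beginning with the template lookup and target validation helpers.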
+class TestOmeConfigCompBaseline(FakeAnsibleModule):
+ module = ome_configuration_compliance_baseline
+
+ @pytest.mark.parametrize("params", [{"name": "baseline", "template_name": "iDRAC 13G Enable Low Latency Profile"},
+ {"name": "baseline", "template_id": 1}])
+ def test_ome_configuration_get_template_details_case1(self, params, ome_response_mock,
+ ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params=params)
+ template_info = {
+ "@odata.count": 1,
+ "value": [{
+ "Id": 1,
+ "Name": "iDRAC 13G Enable Low Latency Profile",
+ "Description": "Tune workload for High Performance Computing Environment",
+ "SourceDeviceId": 0,
+ "TypeId": 2,
+ "ViewTypeId": 4,
+ }]
+ }
+ ome_response_mock.json_data = template_info
+ template_data = self.module.get_template_details(f_module, ome_connection_mock_for_compliance)
+ assert template_data == template_info["value"][0]
+
+ @pytest.mark.parametrize("params", [{"names": "baseline", "template_name": "iDRAC 13G Enable Low Latency Profile"},
+ {"names": "baseline", "template_id": 1}])
+ def test_ome_configuration_get_template_details_case2(self, params, ome_response_mock,
+ ome_connection_mock_for_compliance):
+ """
+        An exception is raised when an invalid template name or ID is provided.
+ """
+ f_module = self.get_module_mock(params=params)
+ template_info = {
+ "@odata.count": 1,
+ "value": []
+ }
+ ome_response_mock.json_data = template_info
+ with pytest.raises(Exception) as err:
+ self.module.get_template_details(f_module, ome_connection_mock_for_compliance)
+ if "template_id" in params:
+ assert err.value.args[0] == TEMPLATE_ID_ERROR_MSG.format(params['template_id'])
+ else:
+ assert err.value.args[0] == TEMPLATE_NAME_ERROR_MSG.format(params['template_name'])
+
+ def test_validate_identifiers_case01(self):
+ """
+ No exception thrown when valid device ids are passed
+ """
+ requested_values = [Constants.device_id1, Constants.device_id2]
+ f_module = self.get_module_mock(params={"device_ids": requested_values})
+ available_values = dict([(item["Id"], item["Identifier"]) for item in device_info["value"]])
+ self.module.validate_identifiers(available_values.keys(), requested_values, "device_ids", f_module)
+
+ def test_validate_identifiers_case02(self):
+ """
+        No exception thrown when valid device service tags are passed
+ """
+ requested_values = [Constants.service_tag2, Constants.service_tag1]
+ available_values = dict([(item["Id"], item["Identifier"]) for item in device_info["value"]])
+ f_module = self.get_module_mock(params={"device_service_tags": requested_values})
+ self.module.validate_identifiers(available_values.values(), requested_values, "device_service_tags", f_module)
+
+ @pytest.mark.parametrize("val", [[Constants.service_tag1, "abc", "xyz"], ["abc", "xyz"]])
+ def test_validate_identifiers_case03(self, val):
+ """
+ Exception should be thrown when invalid service tags are passed
+ """
+ requested_values = val
+ f_module = self.get_module_mock(params={"device_service_tags": requested_values})
+ available_values = dict([(item["Id"], item["Identifier"]) for item in device_info["value"]])
+ with pytest.raises(Exception) as err:
+ self.module.validate_identifiers(available_values.values(), requested_values, "device_service_tags",
+ f_module)
+ assert err.value.args[0].find("Target with device_service_tags") != -1
+
+ def test_get_identifiers_case01(self):
+ """
+        Get the device IDs from service tags.
+ """
+ available_identifiers_map = dict([(item["Id"], item["Identifier"]) for item in device_info["value"]])
+ requested_values = [Constants.service_tag1]
+ val = self.module.get_identifiers(available_identifiers_map, requested_values)
+ assert val == [Constants.device_id1]
+
+ def test_get_identifiers_case02(self):
+ """
+        Get the group IDs from group names.
+ """
+ available_identifiers_map = dict([(item["Id"], item["Name"]) for item in group_info["value"]])
+ requested_values = ["OEM Mock"]
+ val = self.module.get_identifiers(available_identifiers_map, requested_values)
+ assert val == [Constants.device_id2]
+
+ def test_get_group_ids(self, ome_connection_mock_for_compliance):
+ """
+ success case
+ """
+ f_module = self.get_module_mock(params={"device_group_names": ["OEM Mock"], "command": "create",
+ "template_id": 2})
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = {
+ "total_count": group_info["@odata.count"], "value": group_info["value"]}
+ value = self.module.get_group_ids(f_module, ome_connection_mock_for_compliance)
+ assert value == [Constants.device_id2]
+
+ def test_get_group_ids_failure_case1(self, ome_connection_mock_for_compliance):
+ """
+        Failure case: the requested group name does not exist.
+ """
+ f_module = self.get_module_mock(params={"device_group_names": ["OEM Mock Invalid"], "command": "create",
+ "template_id": 2})
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = {
+ "total_count": group_info["@odata.count"],
+ "value": group_info["value"]
+ }
+ with pytest.raises(Exception) as err:
+ self.module.get_group_ids(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == "Target with device_group_names OEM Mock Invalid not found."
+
+ def test_get_group_ids_failure_case2(self, ome_connection_mock_for_compliance):
+ """
+        Failure case: group details are not available.
+ """
+ f_module = self.get_module_mock(params={"device_group_names": ["OEM Mock Invalid"], "command": "create",
+ "template_id": 2})
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = {
+ "total_count": group_info["@odata.count"],
+ "value": []
+ }
+ with pytest.raises(Exception) as err:
+ self.module.get_group_ids(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == INVALID_DEVICES.format(identifier="Group")
+
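+ # The get_device_ids tests below assert that the helper returns both the
+ # resolved device IDs and a capability map splitting the requested targets
+ # into "capable" and "non_capable" buckets, and that unknown IDs, unknown
+ # service tags, and an empty device report each raise a descriptive error.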
+ def test_get_device_ids_case01(self, ome_response_mock, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(
+ params={"device_ids": [Constants.device_id2, Constants.device_id1], "command": "create",
+ "template_id": 2})
+ ome_connection_mock_for_compliance.get_all_report_details.return_value = {
+ "resp_obj": ome_response_mock, "report_list": device_info["value"]}
+ value, compatible_map = self.module.get_device_ids(f_module, ome_connection_mock_for_compliance)
+ assert value == [Constants.device_id2, Constants.device_id1]
+ assert compatible_map == {"capable": [Constants.device_id1], "non_capable": [Constants.device_id2]}
+
+ def test_get_device_ids_case2(self, ome_response_mock, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"device_service_tags": [Constants.service_tag1], "command": "create",
+ "template_id": 2})
+ ome_connection_mock_for_compliance.get_all_report_details.return_value = {
+ "resp_obj": ome_response_mock, "report_list": device_info["value"]}
+ value, compatible_map = self.module.get_device_ids(f_module, ome_connection_mock_for_compliance)
+ assert value == [Constants.device_id1]
+ assert compatible_map == {"capable": [Constants.service_tag1], "non_capable": [Constants.service_tag2]}
+
+ def test_get_device_ids_case01_failurecase(self, ome_response_mock, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"device_ids": [100], "command": "create",
+ "template_id": 2})
+ ome_connection_mock_for_compliance.get_all_report_details.return_value = {
+ "resp_obj": ome_response_mock, "report_list": device_info["value"]}
+ with pytest.raises(Exception) as err:
+ self.module.get_device_ids(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == "Target with device_ids 100 not found."
+
+ def test_get_device_ids_case2_failure_case(self, ome_response_mock, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"device_service_tags": ["xyz"], "command": "create",
+ "template_id": 2})
+ ome_connection_mock_for_compliance.get_all_report_details.return_value = {
+ "resp_obj": ome_response_mock, "report_list": device_info["value"]}
+ with pytest.raises(Exception) as err:
+ self.module.get_device_ids(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == "Target with device_service_tags xyz not found."
+
+ def test_get_device_ids_failure_case(self, ome_response_mock, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"device_ids": [Constants.device_id2], "command": "create",
+ "template_id": 2})
+ ome_connection_mock_for_compliance.get_all_report_details.return_value = {
+ "resp_obj": ome_response_mock, "report_list": []}
+ with pytest.raises(Exception) as err:
+ self.module.get_device_ids(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == INVALID_DEVICES.format(identifier="Device")
+
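+ # The create_payload tests below assert that the payload carries Name,
+ # TemplateId, and BaselineTargets whether the targets were given as device
+ # IDs, service tags, or group names; the target-resolution helpers are
+ # mocked so only the payload assembly is exercised.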
+ def test_create_payload_case1(self, mocker, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(
+ params={"device_ids": [Constants.device_id1, Constants.device_id2], "command": "create",
+ "template_id": 2, "names": ["baseline1"]})
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=([Constants.device_id1, Constants.device_id2], {}))
+ mocker.patch(MODULE_PATH + 'validate_capability',
+ return_value=None)
+ mocker.patch(MODULE_PATH + 'get_template_details',
+ return_value={"Id": 2, "Name": "template1"})
+ payload = self.module.create_payload(f_module, ome_connection_mock_for_compliance)
+ assert payload == {
+ "Name": "baseline1",
+ "TemplateId": 2,
+ "BaselineTargets": [{"Id": Constants.device_id1},
+ {"Id": Constants.device_id2}]
+ }
+
+ def test_create_payload_case2(self, mocker, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(
+ params={"device_service_tags": [Constants.service_tag1, Constants.service_tag2], "command": "create",
+ "template_id": 2, "names": ["baseline1"]})
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=([Constants.device_id1, Constants.device_id2], "map"))
+ mocker.patch(MODULE_PATH + 'validate_capability',
+ return_value=None)
+ mocker.patch(MODULE_PATH + 'get_template_details',
+ return_value={"Id": 2, "Name": "template1"})
+ payload = self.module.create_payload(f_module, ome_connection_mock_for_compliance)
+ assert payload == {
+ "Name": "baseline1",
+ "TemplateId": 2,
+ "BaselineTargets": [{"Id": Constants.device_id1},
+ {"Id": Constants.device_id2}]
+ }
+
+ def test_create_payload_case3(self, mocker, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"device_group_names": ["xyz"], "command": "create",
+ "template_id": 2, "names": ["baseline1"]})
+ mocker.patch(MODULE_PATH + 'get_group_ids',
+ return_value=[Constants.device_id1, Constants.device_id2])
+ mocker.patch(MODULE_PATH + 'get_template_details',
+ return_value={"Id": 2, "Name": "template1"})
+ payload = self.module.create_payload(f_module, ome_connection_mock_for_compliance)
+ assert payload == {
+ "Name": "baseline1",
+ "TemplateId": 2,
+ "BaselineTargets": [{"Id": Constants.device_id1},
+ {"Id": Constants.device_id2}]
+ }
+
+ def test_get_baseline_compliance_info(self, ome_connection_mock_for_compliance):
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = baseline_info
+ val = self.module.get_baseline_compliance_info(ome_connection_mock_for_compliance, "baseline5", "Name")
+ assert val == baseline_output
+
+ def test_get_baseline_compliance_info_case2(self, ome_connection_mock_for_compliance):
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = baseline_info
+ val = self.module.get_baseline_compliance_info(ome_connection_mock_for_compliance, 30, "Id")
+ assert val == baseline_output
+
+ def test_track_compliance_task_completion_case01(self, mocker, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"device_group_names": ["xyz"], "command": "create",
+ "template_id": 2, "names": ["baseline1"], "job_wait": True,
+ "job_wait_timeout": 600})
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
+ msg, info = self.module.track_compliance_task_completion(ome_connection_mock_for_compliance, 30, f_module)
+ assert msg == CREATE_MSG
+ assert info == baseline_output
+
+ def test_track_compliance_task_completion_case02(self, mocker, ome_connection_mock_for_compliance):
+ baseline_output["PercentageComplete"] = 25
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info', return_value=baseline_output)
+ f_module = self.get_module_mock(params={"device_group_names": ["xyz"], "command": "create",
+ "template_id": 2, "names": ["baseline1"], "job_wait": True,
+ "job_wait_timeout": 600})
+ msg, info = self.module.track_compliance_task_completion(ome_connection_mock_for_compliance, 30, f_module)
+ assert msg == TASK_PROGRESS_MSG
+ assert info == baseline_output
+ assert info["PercentageComplete"] == 25
+
+ def test_track_compliance_task_completion_case03(self, mocker, ome_connection_mock_for_compliance):
+ baseline_output["PercentageComplete"] = 25
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info', return_value=baseline_output)
+ f_module = self.get_module_mock(params={"device_group_names": ["xyz"], "command": "create",
+ "template_id": 2, "names": ["baseline1"], "job_wait": False,
+ "job_wait_timeout": 600})
+ msg, info = self.module.track_compliance_task_completion(ome_connection_mock_for_compliance, 30, f_module)
+ assert msg == TASK_PROGRESS_MSG
+ assert info == baseline_output
+ assert info["PercentageComplete"] == 25
+
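+ # The two cases below assert that validate_create_baseline_idempotency exits
+ # with BASELINE_CHECK_MODE_CHANGE_MSG when a baseline with the requested name
+ # already exists, in both check mode and normal mode.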
+ @pytest.mark.parametrize('val', [True, False])
+ def test_validate_create_baseline_idempotency(self, mocker, val, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"names": ["baseline5"]}, check_mode=val)
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ with pytest.raises(Exception) as err:
+ self.module.validate_create_baseline_idempotency(f_module,
+ ome_connection_mock_for_compliance)
+ assert err.value.args[0] == BASELINE_CHECK_MODE_CHANGE_MSG.format(name=baseline_output["Name"])
+
+ def test_validate_create_baseline_idempotency_case2(self, mocker, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"names": ["baseline5"]}, check_mode=True)
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ with pytest.raises(Exception) as err:
+ self.module.validate_create_baseline_idempotency(f_module,
+ ome_connection_mock_for_compliance)
+ assert err.value.args[0] == BASELINE_CHECK_MODE_CHANGE_MSG.format(name="baseline5")
+
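+ # The create_baseline cases below assert that the module exits with
+ # TASK_PROGRESS_MSG when job_wait is disabled, with CREATE_MSG when job
+ # tracking reports success, and with the job-tracking message otherwise.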
+ def test_create_baseline_case01(self, mocker, ome_response_mock, ome_connection_mock_for_compliance):
+ mocker.patch(MODULE_PATH + 'validate_create_baseline_idempotency',
+ return_value=None)
+ mocker.patch(MODULE_PATH + 'create_payload',
+ return_value={})
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ f_module = self.get_module_mock(params={"names": ["baseline5"], "job_wait": False, "job_wait_timeout": 600},
+ check_mode=False)
+ ome_response_mock.json_data = {"Id": 1}
+ ome_connection_mock_for_compliance.job_tracking.return_value = False, "message"
+ with pytest.raises(Exception) as err:
+ self.module.create_baseline(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == TASK_PROGRESS_MSG
+
+ @pytest.mark.parametrize("val",
+ [(False, "Job completed successfully."), (False, "other message."), (True, "message2")])
+ def test_create_baseline_case02(self, val, mocker, ome_response_mock, ome_connection_mock_for_compliance):
+ mocker.patch(MODULE_PATH + 'validate_create_baseline_idempotency',
+ return_value=None)
+ mocker.patch(MODULE_PATH + 'create_payload',
+ return_value={})
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ f_module = self.get_module_mock(params={"job_wait": True, "job_wait_timeout": 600}, check_mode=False)
+ ome_connection_mock_for_compliance.job_tracking.return_value = val[0], val[1]
+ ome_response_mock.json_data = {"Id": 1}
+ with pytest.raises(Exception) as err:
+ self.module.create_baseline(f_module, ome_connection_mock_for_compliance)
+ if val[0] is False and "successfully" in val[1]:
+ assert err.value.args[0] == CREATE_MSG
+ else:
+ assert err.value.args[0] == val[1]
+
+ def test_validate_names(self):
+ f_module = self.get_module_mock(params={"names": ["abc"]}, check_mode=False)
+ self.module.validate_names("create", f_module)
+
+ def test_validate_names_case02(self):
+ f_module = self.get_module_mock(params={"names": ["abc", "xyz"]}, check_mode=False)
+ with pytest.raises(Exception) as err:
+ self.module.validate_names("create", f_module)
+ assert err.value.args[0] == NAMES_ERROR
+
+ @pytest.mark.parametrize("command", ["create"])
+ def test_compliance_operation(self, mocker, command, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"names": ["abc"], "command": "create"}, check_mode=False)
+ mocker.patch(MODULE_PATH + 'validate_job_time',
+ return_value=None)
+ mock_create = mocker.patch(MODULE_PATH + 'create_baseline',
+ return_value=None)
+ self.module.compliance_operation(f_module, ome_connection_mock_for_compliance)
+ assert mock_create.called
+
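+ # The exception-path test below asserts that URLError marks the module result
+ # as unreachable, while HTTPError and the remaining exception types surface
+ # as a failed result that carries an error message.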
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_compliance_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_compliance, ome_response_mock):
+ ome_default_args.update({"template_name": "t1", "names": "baseline1"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'compliance_operation', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'compliance_operation', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'compliance_operation',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
+
+ def test_compliance_create_argument_exception_case1(self, ome_default_args):
+ ome_default_args.update({"template_name": "t1"})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "missing required arguments: names"
+
+ def test_compliance_create_argument_exception_case2(self, ome_default_args):
+ ome_default_args.update({"template_id": 1})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "missing required arguments: names"
+
+ def test_compliance_create_argument_exception_case3(self, ome_default_args):
+ ome_default_args.update({"names": "baseline1"})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "command is create but any of the following are missing: template_name, template_id"
+
+ def test_compliance_create_argument_exception_case4(self, ome_default_args):
+ ome_default_args.update({"names": "baseline1", "template_name": "t1", "template_id": 1})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "parameters are mutually exclusive: template_id|template_name"
+
+ def test_compliance_create_argument_exception_case5(self, ome_default_args):
+ ome_default_args.update({"names": "baseline1", "device_ids": 1, "template_name": "t1",
+ "device_service_tags": "xyz"})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "parameters are mutually exclusive: device_ids|device_service_tags"
+
+ def test_compliance_create_argument_exception_case6(self, ome_default_args):
+ ome_default_args.update({"names": "baseline1", "template_name": "t1", "device_ids": 1,
+ "device_group_names": "xyz"})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "parameters are mutually exclusive: device_ids|device_group_names"
+
+ def test_compliance_create_argument_exception_case7(self, ome_default_args):
+ ome_default_args.update({"names": "baseline1", "template_name": "t1", "device_service_tags": "abc",
+ "device_group_names": "xyz"})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "parameters are mutually exclusive: device_service_tags|device_group_names"
+
+ def test_compliance_create_argument_exception_case8(self, ome_default_args):
+ ome_default_args.update(
+ {"names": "baseline1", "template_name": "t1", "device_ids": 1, "device_service_tags": "xyz",
+ "device_group_names": "abc"})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "parameters are mutually exclusive: device_ids|device_service_tags, " \
+ "device_ids|device_group_names, device_service_tags|device_group_names"
+
+ @pytest.mark.parametrize("command", ["delete"])
+ def test_compliance_operation_delete(self, mocker, command, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"names": ["abc"], "command": "delete"}, check_mode=False)
+ mock_delete_compliance = mocker.patch(MODULE_PATH + 'delete_compliance',
+ return_value=None)
+ mocker.patch(MODULE_PATH + 'validate_job_time',
+ return_value=None)
+ self.module.compliance_operation(f_module, ome_connection_mock_for_compliance)
+ assert mock_delete_compliance.called
+
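+ # The delete_idempotency_check cases below assert that matching baseline IDs
+ # are returned for deletion, and that check mode exits with
+ # CHECK_MODE_CHANGES_MSG when matches exist or CHECK_MODE_NO_CHANGES_MSG
+ # when none are found.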
+ def test_delete_idempotency_check_case01(self, mocker, ome_connection_mock_for_compliance):
+ mocker.patch(MODULE_PATH + 'get_identifiers',
+ return_value=[30])
+ f_module = self.get_module_mock(params={"names": ["baseline5"]}, check_mode=False)
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = baseline_info
+ val = self.module.delete_idempotency_check(f_module, ome_connection_mock_for_compliance)
+ assert val == [30]
+
+ def test_delete_idempotency_check_case02(self, mocker, ome_connection_mock_for_compliance):
+ mocker.patch(MODULE_PATH + 'get_identifiers',
+ return_value=[30])
+ f_module = self.get_module_mock(params={"names": ["baseline5"]}, check_mode=True)
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = baseline_info
+ with pytest.raises(Exception) as err:
+ self.module.delete_idempotency_check(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == CHECK_MODE_CHANGES_MSG
+
+ def test_delete_idempotency_check_case03(self, mocker, ome_connection_mock_for_compliance):
+ mocker.patch(MODULE_PATH + 'get_identifiers',
+ return_value=[])
+ f_module = self.get_module_mock(params={"names": ["baseline5"]}, check_mode=True)
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = baseline_info
+ with pytest.raises(Exception) as err:
+ self.module.delete_idempotency_check(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == CHECK_MODE_NO_CHANGES_MSG
+
+ def test_delete_compliance_case01(self, mocker, ome_connection_mock_for_compliance, ome_response_mock):
+ mocker.patch(MODULE_PATH + 'delete_idempotency_check',
+ return_value=[30])
+ f_module = self.get_module_mock(params={"names": ["baseline5"]}, check_mode=False)
+ ome_response_mock.json_data = None
+ ome_response_mock.status_code = 204
+ with pytest.raises(Exception) as err:
+ self.module.delete_compliance(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == DELETE_MSG
+
+ def test_compliance_operation_modify(self, mocker, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"names": ["abc"], "command": "modify"}, check_mode=False)
+ mock_modify = mocker.patch(MODULE_PATH + 'modify_baseline',
+ return_value=None)
+ mocker.patch(MODULE_PATH + 'validate_job_time',
+ return_value=None)
+ self.module.compliance_operation(f_module, ome_connection_mock_for_compliance)
+ assert mock_modify.called
+
+ @pytest.mark.parametrize("val", [(False, "Job completed successfully."), (False, "message1"), (True, "message2")])
+ def test_modify_baseline_case01(self, val, mocker, ome_response_mock, ome_connection_mock_for_compliance):
+ payload = {
+ "Name": "baseline1",
+ "TemplateId": 2,
+ "BaselineTargets": [{"Id": Constants.device_id1},
+ {"Id": Constants.device_id2}]
+ }
+ f_module = self.get_module_mock(params={"names": ["abc"], "command": "modify", "job_wait": True,
+ "job_wait_timeout": 600}, check_mode=False)
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ mocker.patch(MODULE_PATH + 'create_payload',
+ return_value=payload)
+ mocker.patch(MODULE_PATH + 'idempotency_check_for_command_modify',
+ return_value=None)
+ ome_connection_mock_for_compliance.job_tracking.return_value = val[0], val[1]
+ ome_response_mock.json_data = {"Id": 1}
+ with pytest.raises(Exception) as err:
+ self.module.modify_baseline(f_module, ome_connection_mock_for_compliance)
+ if val[0] is False and "successfully" in val[1]:
+ assert err.value.args[0] == MODIFY_MSG
+ else:
+ assert err.value.args[0] == val[1]
+
+ def test_modify_baseline_case02(self, mocker, ome_response_mock, ome_connection_mock_for_compliance):
+ payload = {
+ "Name": "baseline1",
+ "TemplateId": 2,
+ "BaselineTargets": [{"Id": Constants.device_id1},
+ {"Id": Constants.device_id2}]
+ }
+ f_module = self.get_module_mock(
+ params={"names": ["abc"], "command": "modify", "job_wait": False,
+ "job_wait_timeout": 600}, check_mode=False)
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ mocker.patch(MODULE_PATH + 'create_payload',
+ return_value=payload)
+ mocker.patch(MODULE_PATH + 'idempotency_check_for_command_modify',
+ return_value=None)
+ ome_response_mock.json_data = {"Id": 1}
+ with pytest.raises(Exception) as err:
+ self.module.modify_baseline(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == TASK_PROGRESS_MSG
+
+ def test_modify_baseline_case03(self, mocker, ome_response_mock, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"names": ["abc"], "command": "modify"}, check_mode=False)
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value={})
+ with pytest.raises(Exception) as err:
+ self.module.modify_baseline(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == BASELINE_CHECK_MODE_NOCHANGE_MSG.format(name="abc")
+
+ def test_modify_baseline_case04(self, mocker, ome_response_mock, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"names": ["abc"], "command": "modify"}, check_mode=False)
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value={})
+ with pytest.raises(Exception) as err:
+ self.module.modify_baseline(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == BASELINE_CHECK_MODE_NOCHANGE_MSG.format(name="abc")
+
+ def test_idempotency_check_for_command_modify_case1(self, mocker):
+ f_module = self.get_module_mock(params={"names": ["abc"], "command": "modify"}, check_mode=True)
+ mocker.patch(MODULE_PATH + 'compare_payloads',
+ return_value="diff")
+ with pytest.raises(Exception) as err:
+ self.module.idempotency_check_for_command_modify("current_payload", "expected_payload", f_module)
+ assert err.value.args[0] == CHECK_MODE_CHANGES_MSG
+
+ def test_idempotency_check_for_command_modify_case2(self, mocker):
+ f_module = self.get_module_mock(params={"names": ["abc"], "command": "modify"}, check_mode=True)
+ mocker.patch(MODULE_PATH + 'compare_payloads',
+ return_value=None)
+ with pytest.raises(Exception) as err:
+ self.module.idempotency_check_for_command_modify("current_payload", "expected_payload", f_module)
+ assert err.value.args[0] == CHECK_MODE_NO_CHANGES_MSG
+
+ def test_idempotency_check_for_command_modify_case3(self, mocker):
+ f_module = self.get_module_mock(params={"names": ["abc"], "command": "modify"}, check_mode=False)
+ mocker.patch(MODULE_PATH + 'compare_payloads',
+ return_value={})
+ with pytest.raises(Exception) as err:
+ self.module.idempotency_check_for_command_modify("current_payload", "expected_payload", f_module)
+ assert err.value.args[0] == IDEMPOTENCY_MSG
+
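+ # The compare_payloads cases below assert that a difference is reported
+ # (True) whenever any key in the modify payload is missing from or differs
+ # in the current payload, and that no difference (False) is reported when
+ # every requested key already matches.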
+ @pytest.mark.parametrize("modify_payload", [{"Id": 29, "Name": "baselin9", "TemplateId": 102},
+ {"Id": 29, "Name": "baselin8", "TemplateId": 103},
+ {"Id": 29, "Name": "baselin8", "TemplateId": 102,
+ "BaselineTargets": [{"Id": 10074}]},
+ {"Id": 29, "Name": "baselin8", "TemplateId": 102,
+ "BaselineTargets": [{"Id": 10079}]},
+ {"Id": 29, "Name": "baselin8", "TemplateId": 102,
+ "BaselineTargets": [{"Id": 10075},
+ {"Id": 10074}]}
+ ])
+ def test_compliance_compare_payloads_diff_case_01(self, modify_payload):
+ current_payload = {
+ "Id": 29,
+ "Name": "baselin8",
+ "Description": "desc",
+ "TemplateId": 102,
+ "BaselineTargets": [
+ {
+ "Id": 10075
+ }
+ ]
+ }
+ val = self.module.compare_payloads(modify_payload, current_payload)
+ assert val is True
+
+ @pytest.mark.parametrize("current_payload", [{"Id": 29, "Name": "baselin8", "Description": "desc1"},
+ {"Id": 29, "Name": "baselin9", "TemplateId": 102},
+ {"Id": 29, "Name": "baselin8", "TemplateId": 103},
+ {"Id": 29, "Name": "baselin8", "TemplateId": 102,
+ "BaselineTargets": [{"Id": 10074}]},
+ {"Id": 29, "Name": "baselin8", "TemplateId": 102,
+ "BaselineTargets": [{"Id": 10079}]}])
+ def test_compliance_compare_payloads_diff_case_02(self, current_payload):
+ modify_payload = {
+ "Id": 29,
+ "Name": "baselin8",
+ "Description": "desc",
+ "TemplateId": 102,
+ "BaselineTargets": [
+ {
+ "Id": 10075
+ }
+ ]
+ }
+ val = self.module.compare_payloads(modify_payload, current_payload)
+ assert val is True
+
+ @pytest.mark.parametrize("modify_payload", [{"Id": 29, "Name": "baselin8", "TemplateId": 102},
+ {"Id": 29, "Name": "baselin8", "Description": "desc"},
+ {"Id": 29, "Name": "baselin8",
+ "BaselineTargets": [{"Id": 10075}]}])
+ def test_compliance_compare_payloads_no_diff_case_03(self, modify_payload):
+ current_payload = {
+ "Id": 29,
+ "Name": "baselin8",
+ "Description": "desc",
+ "TemplateId": 102,
+ "BaselineTargets": [
+ {
+ "Id": 10075
+ }
+ ]
+ }
+ val = self.module.compare_payloads(modify_payload, current_payload)
+ assert val is False
+
+ def test_get_ome_version(self, ome_response_mock, ome_connection_mock_for_compliance):
+ ome_response_mock.json_data = {
+ "Name": "OM Enterprise",
+ "Description": "OpenManage Enterprise",
+ "Vendor": "Dell, Inc.",
+ "ProductType": 1,
+ "Version": "3.4.1",
+ "BuildNumber": "24",
+ "OperationJobId": 0
+ }
+ version = self.module.get_ome_version(ome_connection_mock_for_compliance)
+ assert version == "3.4.1"
+
+ def test_validate_remediate_idempotency_with_device_ids(self, mocker, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(
+ params={"device_ids": [Constants.device_id2, Constants.device_id1], "command": "remediate",
+ "names": ["baseline1"]})
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=([Constants.device_id2, Constants.device_id1], "map"))
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = {
+ "total_count": compliance_report["@odata.count"], "value": compliance_report["value"]}
+ noncomplaint_devices, baseline_info = self.module.validate_remediate_idempotency(f_module,
+ ome_connection_mock_for_compliance)
+ assert noncomplaint_devices == [Constants.device_id2]
+ assert baseline_info == baseline_output
+
+ def test_validate_remediate_idempotency_with_service_tags(self, mocker, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(
+ params={"device_ids": [Constants.service_tag1, Constants.service_tag2], "command": "remediate",
+ "names": ["baseline1"]})
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=([Constants.device_id2, Constants.device_id1], "map"))
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = {
+ "total_count": compliance_report["@odata.count"], "value": compliance_report["value"]}
+ noncomplaint_devices, baseline_info = self.module.validate_remediate_idempotency(f_module,
+ ome_connection_mock_for_compliance)
+ assert noncomplaint_devices == [Constants.device_id2]
+ assert baseline_info == baseline_output
+
+ def test_validate_remediate_idempotency_without_devices(self, mocker, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(
+ params={"command": "remediate", "names": ["baseline1"]})
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=([Constants.device_id2, Constants.device_id1], "map"))
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = {
+ "total_count": compliance_report["@odata.count"], "value": compliance_report["value"]}
+ noncomplaint_devices, baseline_info = self.module.validate_remediate_idempotency(f_module,
+ ome_connection_mock_for_compliance)
+ assert noncomplaint_devices == [Constants.device_id2]
+ assert baseline_info == baseline_output
+
+ def test_validate_remediate_idempotency_when_all_compliant(self, mocker, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(
+ params={"command": "remediate", "names": ["baseline1"]})
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=([Constants.device_id2, Constants.device_id1], "map"))
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ report = {
+ "@odata.count": 2,
+ "value": [
+ {
+ "Id": Constants.device_id1,
+ "ServiceTag": Constants.service_tag1,
+ "ComplianceStatus": "COMPLIANT"
+ },
+ {
+ "Id": Constants.device_id2,
+ "ServiceTag": Constants.service_tag2,
+ "ComplianceStatus": "COMPLIANT"
+ }
+ ]
+ }
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = {
+ "total_count": report["@odata.count"], "value": report["value"]}
+ with pytest.raises(Exception) as err:
+ self.module.validate_remediate_idempotency(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == CHECK_MODE_NO_CHANGES_MSG
+
+ def test_validate_remediate_idempotency_without_devices_check_mode(self, mocker,
+ ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(
+ params={"command": "remediate", "names": ["baseline1"]}, check_mode=True)
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=([Constants.device_id2, Constants.device_id1], "map"))
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = {
+ "total_count": compliance_report["@odata.count"], "value": compliance_report["value"]}
+ with pytest.raises(Exception) as err:
+ self.module.validate_remediate_idempotency(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == CHECK_MODE_CHANGES_MSG
+
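+ # The create_remediate_payload cases below assert that the payload uses the
+ # "TargetIds" key for the older OME versions in the list (3.4 and earlier)
+ # and the "DeviceIds" key for the 3.5 releases, based on the version
+ # returned by get_ome_version.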
+ @pytest.mark.parametrize("val", ["3.4.1", "3.4.5", "3.4.0", "3.4", "3.3", "3.3.0", "3.0.0", "2.1"])
+ def test_create_remediate_payload_case01_for_old_releases(self, val, mocker, ome_connection_mock_for_compliance):
+ mocker.patch(MODULE_PATH + 'get_ome_version',
+ return_value=val)
+ payload = self.module.create_remediate_payload([Constants.device_id1], baseline_output,
+ ome_connection_mock_for_compliance)
+ assert "TargetIds" in payload
+
+ @pytest.mark.parametrize("val", ["3.5.1", "3.5.5", "3.5.0", "3.5"])
+ def test_create_remediate_payload_case01_for_new_releases(self, val, mocker, ome_connection_mock_for_compliance):
+ mocker.patch(MODULE_PATH + 'get_ome_version',
+ return_value=val)
+ payload = self.module.create_remediate_payload([Constants.device_id1], baseline_output,
+ ome_connection_mock_for_compliance)
+ assert "DeviceIds" in payload
+
+ def test_remediate_baseline_case1(self, mocker, ome_connection_mock_for_compliance, ome_response_mock):
+ f_module = self.get_module_mock(
+ params={"command": "remediate", "names": ["baseline1"], "job_wait": True, "job_wait_timeout": 600},
+ check_mode=True)
+ mocker.patch(MODULE_PATH + 'validate_remediate_idempotency',
+ return_value=([Constants.device_id1], baseline_output))
+ mocker.patch(MODULE_PATH + 'create_remediate_payload',
+ return_value="payload")
+ ome_response_mock.json_data = 1234
+ ome_connection_mock_for_compliance.job_tracking.return_value = True, "job fail message"
+ with pytest.raises(Exception) as err:
+ self.module.remediate_baseline(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == "job fail message"
+
+ def test_remediate_baseline_case2(self, mocker, ome_connection_mock_for_compliance, ome_response_mock):
+ f_module = self.get_module_mock(
+ params={"command": "remediate", "names": ["baseline1"], "job_wait": True, "job_wait_timeout": 600},
+ check_mode=True)
+ mocker.patch(MODULE_PATH + 'validate_remediate_idempotency',
+ return_value=([Constants.device_id1], baseline_output))
+ mocker.patch(MODULE_PATH + 'create_remediate_payload',
+ return_value="payload")
+ ome_response_mock.json_data = 1234
+ ome_connection_mock_for_compliance.job_tracking.return_value = False, "Job completed successfully."
+ with pytest.raises(Exception) as err:
+ self.module.remediate_baseline(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == REMEDIATE_MSG
+
+ def test_remediate_baseline_case3(self, mocker, ome_connection_mock_for_compliance, ome_response_mock):
+ f_module = self.get_module_mock(
+ params={"command": "remediate", "names": ["baseline1"], "job_wait": False, "job_wait_timeout": 600},
+ check_mode=True)
+ mocker.patch(MODULE_PATH + 'validate_remediate_idempotency',
+ return_value=([Constants.device_id1], baseline_output))
+ mocker.patch(MODULE_PATH + 'create_remediate_payload',
+ return_value="payload")
+ ome_response_mock.json_data = 1234
+ with pytest.raises(Exception) as err:
+ self.module.remediate_baseline(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == TASK_PROGRESS_MSG
+
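+ # The validate_job_time cases below assert that create and modify fail with
+ # INVALID_TIME when job_wait is enabled and job_wait_timeout is 0; the
+ # no-error cases further down cover a positive timeout and job_wait disabled.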
+ @pytest.mark.parametrize("inparams", [{"command": "create", "names": ["baseline1"],
+ "job_wait": True, "job_wait_timeout": 0},
+ {"command": "modify", "names": ["baseline1"], "job_wait": True,
+ "job_wait_timeout": 0}])
+ def test_validate_job_time(self, inparams):
+ command = inparams['command']
+ f_module = self.get_module_mock(
+ params=inparams)
+ with pytest.raises(Exception) as err:
+ self.module.validate_job_time(command, f_module)
+ assert err.value.args[0] == INVALID_TIME.format(inparams["job_wait_timeout"])
+
+ @pytest.mark.parametrize("command", ["remediate"])
+ def test_compliance_remediate_operation(self, mocker, command, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"names": ["abc"], "command": "remediate"}, check_mode=False)
+ mocker.patch(MODULE_PATH + 'validate_job_time',
+ return_value=None)
+ mock_remediate = mocker.patch(MODULE_PATH + 'remediate_baseline',
+ return_value=None)
+ self.module.compliance_operation(f_module, ome_connection_mock_for_compliance)
+ assert mock_remediate.called
+
+ @pytest.mark.parametrize("inparams", [{"command": "modify", "names": ["baseline1"], "job_wait": True,
+ "job_wait_timeout": 1},
+ {"command": "modify", "names": ["baseline1"], "job_wait": False,
+ "job_wait_timeout": 1},
+ {"command": "delete", "names": ["baseline1"], "job_wait": True,
+ "job_wait_timeout": 1},
+ ])
+ def test_validate_job_time_no_err_case(self, inparams):
+ command = inparams['command']
+ f_module = self.get_module_mock(
+ params=inparams)
+ self.module.validate_job_time(command, f_module)
+
+ def test_remediate_baseline_case4(self, mocker, ome_connection_mock_for_compliance, ome_response_mock):
+ f_module = self.get_module_mock(
+ params={"command": "remediate", "names": ["baseline1"], "job_wait": True, "job_wait_timeout": 600},
+ check_mode=True)
+ mocker.patch(MODULE_PATH + 'validate_remediate_idempotency',
+ return_value=([Constants.device_id1], baseline_output))
+ mocker.patch(MODULE_PATH + 'create_remediate_payload',
+ return_value="payload")
+ ome_response_mock.json_data = 1234
+ ome_connection_mock_for_compliance.job_tracking.return_value = False, "Job is running."
+ with pytest.raises(Exception) as err:
+ self.module.remediate_baseline(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == "Job is running."
+
+ def test_modify_baseline_case05(self, mocker, ome_response_mock, ome_connection_mock_for_compliance):
+ payload = {
+ "Name": "baseline1",
+ "TemplateId": 2
+ }
+ f_module = self.get_module_mock(params={"names": ["abc"], "command": "modify", "job_wait": False,
+ "job_wait_timeout": 600}, check_mode=False)
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ mocker.patch(MODULE_PATH + 'create_payload',
+ return_value=payload)
+ mocker.patch(MODULE_PATH + 'idempotency_check_for_command_modify',
+ return_value=None)
+ ome_response_mock.json_data = {"Id": 1}
+ with pytest.raises(Exception) as err:
+ self.module.modify_baseline(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == TASK_PROGRESS_MSG
+
+ def test_validate_create_baseline_idempotency_case3(self, mocker, ome_connection_mock_for_compliance):
+ f_module = self.get_module_mock(params={"names": ["baseline5"]}, check_mode=True)
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value={})
+ with pytest.raises(Exception) as err:
+ self.module.validate_create_baseline_idempotency(f_module,
+ ome_connection_mock_for_compliance)
+ assert err.value.args[0] == CHECK_MODE_CHANGES_MSG
+
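+ # The validate_capability cases below assert that validation passes when all
+ # requested devices fall in the "capable" bucket of the capability map and
+ # fails with a descriptive message when any target is "non_capable".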
+ def test_validate_capability_no_err_case01(self):
+ capability_map = {"capable": [Constants.device_id1], "non_capable": [Constants.device_id2], }
+ f_module = self.get_module_mock(params={"device_ids": [Constants.device_id1]}, check_mode=True)
+ self.module.validate_capability(f_module, capability_map)
+
+ def test_validate_capability_no_err_case02(self):
+ capability_map = {"capable": [Constants.service_tag1], "non_capable": [Constants.service_tag2]}
+ f_module = self.get_module_mock(params={"device_service_tags": [Constants.service_tag1]}, check_mode=True)
+ self.module.validate_capability(f_module, capability_map)
+
+ def test_validate_capability_err_case01(self):
+ NO_CAPABLE_DEVICES = "Target device_service_tags contains devices which cannot be used for a baseline " \
+ "compliance operation."
+ capability_map = {"capable": [Constants.service_tag2], "non_capable": [Constants.service_tag1]}
+ f_module = self.get_module_mock(params={"device_service_tags": [Constants.service_tag1]}, check_mode=True)
+ with pytest.raises(Exception) as err:
+ self.module.validate_capability(f_module, capability_map)
+ assert err.value.args[0] == NO_CAPABLE_DEVICES
+
+ def test_validate_remediate_idempotency_case01(self, mocker, ome_connection_mock_for_compliance):
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value={})
+ f_module = self.get_module_mock(params={"names": ["name1"]}, check_mode=True)
+ with pytest.raises(Exception) as err:
+ self.module.validate_remediate_idempotency(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == BASELINE_CHECK_MODE_NOCHANGE_MSG.format(name="name1")
+
+ def test_validate_remediate_idempotency_case02(self, mocker, ome_connection_mock_for_compliance):
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ compliance_status = [
+ {
+ "Id": Constants.device_id1,
+ "DeviceName": "XX.XXX.X.XXX",
+ "IpAddresses": [
+ "XX.XXX.X.XXX"
+ ],
+ "Model": "PowerEdge MX840c",
+ "ServiceTag": Constants.service_tag1,
+ "ComplianceStatus": 1,
+ "DeviceType": 1000,
+ "InventoryTime": "2020-10-05 18:28:09.842072"
+ }
+ ]
+ f_module = self.get_module_mock(params={"names": ["name1"], "device_ids": [Constants.device_id1]},
+ check_mode=True)
+ capability_map = {"capable": [Constants.service_tag1], "non_capable": [Constants.service_tag2]}
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=([Constants.device_id2, Constants.device_id1], capability_map))
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = {
+ "total_count": 1, "value": compliance_status}
+ with pytest.raises(Exception) as err:
+ self.module.validate_remediate_idempotency(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == CHECK_MODE_NO_CHANGES_MSG
+
+ def test_validate_remediate_idempotency_case03(self, mocker, ome_connection_mock_for_compliance):
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ compliance_status = [
+ {
+ "Id": Constants.device_id1,
+ "DeviceName": "XX.XXX.X.XXX",
+ "IpAddresses": [
+ "XX.XXX.X.XXX"
+ ],
+ "Model": "PowerEdge MX840c",
+ "ServiceTag": Constants.service_tag1,
+ "ComplianceStatus": 2,
+ "DeviceType": 1000,
+ "InventoryTime": "2020-10-05 18:28:09.842072"
+ }
+ ]
+ f_module = self.get_module_mock(params={"names": ["name1"], "device_ids": [Constants.device_id1]},
+ check_mode=True)
+ capability_map = {"capable": [Constants.service_tag1], "non_capable": [Constants.service_tag2]}
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=([Constants.device_id2, Constants.device_id1], capability_map))
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = {
+ "total_count": 1, "value": compliance_status}
+ with pytest.raises(Exception) as err:
+ self.module.validate_remediate_idempotency(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == CHECK_MODE_CHANGES_MSG
+
+ def test_validate_remediate_idempotency_case04(self, mocker, ome_connection_mock_for_compliance):
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ compliance_status = [
+ {
+ "Id": Constants.device_id1,
+ "DeviceName": "XX.XXX.X.XXX",
+ "IpAddresses": [
+ "XX.XXX.X.XXX"
+ ],
+ "Model": "PowerEdge MX840c",
+ "ServiceTag": Constants.service_tag1,
+ "ComplianceStatus": 2,
+ "DeviceType": 1000,
+ "InventoryTime": "2020-10-05 18:28:09.842072"
+ }
+ ]
+ f_module = self.get_module_mock(params={"names": ["name1"], "device_service_tags": [Constants.service_tag1]},
+ check_mode=True)
+ capability_map = {"capable": [Constants.service_tag1], "non_capable": [Constants.service_tag2]}
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=([Constants.device_id2, Constants.device_id1], capability_map))
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = {
+ "total_count": 1, "value": compliance_status}
+ with pytest.raises(Exception) as err:
+ self.module.validate_remediate_idempotency(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == CHECK_MODE_CHANGES_MSG
+
+ def test_validate_remediate_idempotency_case05(self, mocker, ome_connection_mock_for_compliance):
+ mocker.patch(MODULE_PATH + 'get_baseline_compliance_info',
+ return_value=baseline_output)
+ compliance_status = [
+ {
+ "Id": Constants.device_id1,
+ "DeviceName": "XX.XXX.X.XXX",
+ "IpAddresses": [
+ "XX.XXX.X.XXX"
+ ],
+ "Model": "PowerEdge MX840c",
+ "ServiceTag": Constants.service_tag1,
+ "ComplianceStatus": 2,
+ "DeviceType": 1000,
+ "InventoryTime": "2020-10-05 18:28:09.842072"
+ }
+ ]
+ f_module = self.get_module_mock(params={"names": ["name1"]},
+ check_mode=True)
+ capability_map = {"capable": [Constants.service_tag1], "non_capable": [Constants.service_tag2]}
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=([Constants.device_id2, Constants.device_id1], capability_map))
+ ome_connection_mock_for_compliance.get_all_items_with_pagination.return_value = {
+ "total_count": 1, "value": compliance_status}
+ with pytest.raises(Exception) as err:
+ self.module.validate_remediate_idempotency(f_module, ome_connection_mock_for_compliance)
+ assert err.value.args[0] == CHECK_MODE_CHANGES_MSG
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_info.py
new file mode 100644
index 00000000..b038b119
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_info.py
@@ -0,0 +1,87 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 6.1.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ssl import SSLError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_configuration_compliance_info
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \
+ AnsibleFailJSonException
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_configuration_compliance_info.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_compliance_info(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ ome_connection_mock_obj.get_all_report_details.return_value = {"report_list": []}
+ ome_connection_mock_obj.get_all_items_with_pagination.return_value = {"value": []}
+ return ome_connection_mock_obj
+
+
+class TestBaselineComplianceInfo(FakeAnsibleModule):
+ module = ome_configuration_compliance_info
+
+ def test_validate_device(self, ome_connection_mock_for_compliance_info):
+ value_list = [{"Id": 25011, "ServiceTag": "FGHREF"}]
+ report = ome_connection_mock_for_compliance_info.get_all_items_with_pagination.return_value = {"value": value_list}
+ f_module = self.get_module_mock(params={'baseline': "baseline_one", "device_id": 25011})
+ device = self.module.validate_device(f_module, report,
+ device_id=25011, service_tag=None, base_id=None)
+ service_tag = self.module.validate_device(f_module, report,
+ device_id=None, service_tag="FGHREF", base_id=None)
+ with pytest.raises(Exception) as exc:
+ self.module.validate_device(f_module, report,
+ device_id=25012, service_tag=None, base_id=None)
+ assert device == 25011
+ assert service_tag == 25011
+ assert exc.value.args[0] == "Unable to complete the operation because the entered " \
+ "target device id or service tag '25012' is invalid."
+
+ def test_get_baseline_id(self, ome_connection_mock_for_compliance_info):
+ report_list = [{"Id": 1, "Name": "baseline_one", "TemplateId": 1}]
+ ome_connection_mock_for_compliance_info.get_all_report_details.return_value = {"report_list": report_list}
+ f_module = self.get_module_mock(params={'baseline': "baseline_one"})
+ base_id, template_id = self.module.get_baseline_id(f_module, "baseline_one", ome_connection_mock_for_compliance_info)
+ with pytest.raises(Exception) as exc:
+ self.module.get_baseline_id(f_module, "baseline_two", ome_connection_mock_for_compliance_info)
+ assert exc.value.args[0] == "Unable to complete the operation because the entered " \
+ "target baseline name 'baseline_two' is invalid."
+ assert base_id == 1
+
+ def test_compliance_report(self, ome_connection_mock_for_compliance_info, mocker, ome_response_mock):
+ value_list = [{"Id": 25011, "TemplateId": 1}]
+ ome_connection_mock_for_compliance_info.get_all_items_with_pagination.return_value = {"value": value_list}
+ mocker.patch(MODULE_PATH + "get_baseline_id", return_value=25011)
+ f_module = self.get_module_mock(params={'baseline': "baseline_one"})
+ ome_response_mock.json_data = {"value": [{"Id": 25011, "TemplateId": 1}]}
+ mocker.patch(MODULE_PATH + 'get_baseline_id', return_value=(1, 1))
+ report = self.module.compliance_report(f_module, ome_connection_mock_for_compliance_info)
+ assert report == [{'Id': 25011, 'ComplianceAttributeGroups': None, 'TemplateId': 1}]
+
+ def test_main_exception(self, ome_connection_mock_for_compliance_info, mocker,
+ ome_response_mock, ome_default_args):
+ ome_default_args.update({"baseline": "baseline_one", "device_id": 25011})
+ response = mocker.patch(MODULE_PATH + 'compliance_report')
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"report": "compliance_report"}
+ report = self._run_module(ome_default_args)
+ assert report["changed"] is False
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_group.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_group.py
new file mode 100644
index 00000000..f92a0abe
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_group.py
@@ -0,0 +1,602 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 6.1.0
+# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ssl import SSLError
+from io import StringIO
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_group
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \
+ AnsibleFailJSonException
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+
+netaddr = pytest.importorskip("netaddr")
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_group.'
+ADD_STATIC_GROUP_MESSAGE = "Devices can be added only to the static device groups created using OpenManage Enterprise."
+REMOVE_STATIC_GROUP_MESSAGE = "Devices can be removed only from the static device groups created using OpenManage Enterprise."
+INVALID_IP_FORMAT = "The format {0} of the IP address provided is not supported or invalid."
+IP_NOT_EXISTS = "The IP addresses provided do not exist in OpenManage Enterprise."
+try:
+ from netaddr import IPAddress, IPNetwork, IPRange
+ from netaddr.core import AddrFormatError
+
+ HAS_NETADDR = True
+except ImportError:
+ HAS_NETADDR = False
+
+
+@pytest.fixture
+def ome_connection_mock_for_device_group(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ ome_connection_mock_obj.get_all_report_details.return_value = {"report_list": []}
+ return ome_connection_mock_obj
+
+
+class TestOMEDeviceGroup(FakeAnsibleModule):
+ module = ome_device_group
+
+ def test_ome_device_group_get_group_id_case01(self, ome_connection_mock_for_device_group, ome_response_mock):
+ f_module = self.get_module_mock(params={"name": "Storage Services",
+ "device_ids": [25011], "device_service_tags": []})
+ ome_response_mock.json_data = {"value": []}
+ with pytest.raises(Exception) as exc:
+ self.module.get_group_id(ome_connection_mock_for_device_group, f_module)
+ assert exc.value.args[0] == "Unable to complete the operation because the entered " \
+ "target group name 'Storage Services' is invalid."
+ ome_response_mock.json_data = {"value": [{"Id": 25011, "CreatedBy": "user",
+ "TypeId": 3000, "MembershipTypeId": 12}]}
+ resp = self.module.get_group_id(ome_connection_mock_for_device_group, f_module)
+ assert resp == 25011
+
+ def test_ome_device_group_get_group_id_case02(self, ome_connection_mock_for_device_group, ome_response_mock):
+ f_module = self.get_module_mock(params={"group_id": 1234,
+ "device_ids": [25011], "device_service_tags": []})
+ ome_connection_mock_for_device_group.invoke_request.side_effect = HTTPError('http://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(to_text(json.dumps(
+ {"info": "error_details"}))))
+ with pytest.raises(Exception) as exc1:
+ self.module.get_group_id(ome_connection_mock_for_device_group, f_module)
+ assert exc1.value.args[0] == "Unable to complete the operation because the entered " \
+ "target group Id '1234' is invalid."
+
+ def test_ome_device_group_get_group_id_case03(self, ome_connection_mock_for_device_group, ome_response_mock):
+ f_module = self.get_module_mock(params={"group_id": 1234,
+ "device_ids": [25011], "device_service_tags": []})
+ ome_response_mock.json_data = {"Id": 1234, "CreatedBy": "user",
+ "TypeId": 3000, "MembershipTypeId": 12}
+ resp = self.module.get_group_id(ome_connection_mock_for_device_group, f_module)
+ assert resp == 1234
+
+ def test_ome_device_group_get_device_id(self, ome_connection_mock_for_device_group):
+ report_list = [{"Id": 25011, "DeviceServiceTag": "SEFRG2"}, {"Id": 25012, "DeviceServiceTag": "SEFRG3"}]
+ ome_connection_mock_for_device_group.get_all_report_details.return_value = {"report_list": report_list}
+ f_module = self.get_module_mock(params={"name": "Storage Services",
+ "device_ids": [25011, 25012]})
+ device_list, key = self.module.get_device_id(ome_connection_mock_for_device_group, f_module)
+ assert device_list == [25011, 25012]
+ assert key == "Id"
+ f_module = self.get_module_mock(params={"name": "Storage Services",
+ "device_service_tags": ["SEFRG2", "SEFRG3"]})
+ device_list, key = self.module.get_device_id(ome_connection_mock_for_device_group, f_module)
+ assert device_list == [25011, 25012]
+ assert key == "DeviceServiceTag"
+
+ f_module = self.get_module_mock(params={"name": "Storage Services",
+ "device_ids": [25011, 25000]})
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_id(ome_connection_mock_for_device_group, f_module)
+ assert exc.value.args[0] == "Unable to complete the operation because the entered target " \
+ "device id(s) '25000' are invalid."
+
+ def test_ome_device_group_add_member_to_group(self, ome_connection_mock_for_device_group, ome_response_mock):
+ report_list = [{"Id": 25011, "DeviceServiceTag": "SEFRG2"}]
+ ome_connection_mock_for_device_group.get_all_report_details.return_value = {"report_list": report_list}
+ f_module = self.get_module_mock(params={"name": "Storage Services",
+ "device_ids": [25011]})
+ ome_response_mock.status_code = 204
+ ome_response_mock.success = True
+ with pytest.raises(Exception) as exc:
+ self.module.add_member_to_group(f_module, ome_connection_mock_for_device_group,
+ 1, [25011], "Id")
+ assert exc.value.args[0] == "No changes found to be applied."
+
+ f_module.check_mode = True
+ with pytest.raises(Exception) as exc:
+ self.module.add_member_to_group(f_module, ome_connection_mock_for_device_group,
+ 1, [25011], "Id")
+ assert exc.value.args[0] == "No changes found to be applied."
+
+ f_module.check_mode = False
+ report_list = [{"Id": 25013, "DeviceServiceTag": "SEFRG4"}, {"Id": 25014, "DeviceServiceTag": "SEFRG5"}]
+ ome_connection_mock_for_device_group.get_all_report_details.return_value = {"report_list": report_list}
+ resp, [] = self.module.add_member_to_group(f_module, ome_connection_mock_for_device_group,
+ 1, [25011, 25012], "Id")
+ assert resp.status_code == 204
+
+ f_module.check_mode = True
+ with pytest.raises(Exception) as exc:
+ self.module.add_member_to_group(f_module, ome_connection_mock_for_device_group,
+ 1, [25011, 25012], "Id")
+ assert exc.value.args[0] == "Changes found to be applied."
+
+ def test_ome_device_group_main_exception(self, ome_connection_mock_for_device_group, mocker,
+ ome_response_mock, ome_default_args):
+ ome_default_args.update({"name": "Storage Services", "device_ids": [25011, 25012]})
+ ome_response_mock.status_code = 204
+ ome_response_mock.success = True
+ mocker.patch(MODULE_PATH + 'get_group_id', return_value=1)
+ mocker.patch(MODULE_PATH + 'get_device_id', return_value=[25011, 25012])
+ mocker.patch(MODULE_PATH + 'add_member_to_group', return_value=(ome_response_mock, []))
+ result = self._run_module(ome_default_args)
+ assert result['msg'] == "Successfully added member(s) to the device group."
+
+ def test_ome_device_group_argument_exception_case1(self, ome_default_args):
+ ome_default_args.update({"name": "Storage Services", "device_ids": [25011, 25012], "group_id": 1234})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "parameters are mutually exclusive: name|group_id"
+
+ def test_ome_device_group_argument_exception_case2(self, ome_default_args):
+ ome_default_args.update(
+ {"device_ids": [25011, 25012], "group_id": 1234, "device_service_tags": [Constants.service_tag1]})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "parameters are mutually exclusive: device_ids|device_service_tags|ip_addresses"
+
+ def test_ome_device_group_argument_exception_case3(self, ome_default_args):
+ ome_default_args.update({"device_ids": [25011, 25012]})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "one of the following is required: name, group_id"
+
+ def test_ome_device_group_argument_exception_case4(self, ome_default_args):
+ ome_default_args.update(
+ {"group_id": 1234})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "one of the following is required: device_ids, device_service_tags, ip_addresses"
+
+ def test_ome_device_group_argument_exception_case5(self, ome_default_args):
+ ome_default_args.update(
+ {"device_ids": None, "group_id": 1234, "device_service_tags": None})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "parameters are mutually exclusive: device_ids|device_service_tags|ip_addresses"
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_device_group_argument_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_device_group,
+ ome_response_mock):
+ ome_default_args.update({"name": "Storage Services", "device_ids": [25011, 25012]})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'get_group_id', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'get_group_id', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'get_group_id',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
+
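+    # None of these TypeId/MembershipTypeId combinations is a valid static group, so validate_group must raise ADD_STATIC_GROUP_MESSAGE.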
+ @pytest.mark.parametrize("inp", [{"TypeId": 3000, "MembershipTypeId": 24},
+ {"TypeId": 1000, "MembershipTypeId": 24},
+ {"TypeId": 2000, "MembershipTypeId": 12}])
+ def test_validate_group_case01(self, inp, ome_response_mock):
+ group_resp = {"Id": 25011, "CreatedBy": "user", "TypeId": inp["TypeId"],
+ "MembershipTypeId": inp["MembershipTypeId"]}
+ f_module = self.get_module_mock(params={"name": "group1",
+ "device_ids": [25011]})
+ with pytest.raises(Exception) as exc:
+ self.module.validate_group(group_resp, f_module, "name", "group1")
+ assert exc.value.args[0] == ADD_STATIC_GROUP_MESSAGE
+
+ @pytest.mark.parametrize("inp", [{"TypeId": 3000, "MembershipTypeId": 24},
+ {"TypeId": 1000, "MembershipTypeId": 24},
+ {"TypeId": 2000, "MembershipTypeId": 12}])
+ def test_validate_group_case02(self, inp, ome_response_mock):
+ group_resp = {"Id": 25011, "CreatedBy": "user", "TypeId": inp["TypeId"],
+ "MembershipTypeId": inp["MembershipTypeId"]}
+ f_module = self.get_module_mock(params={"name": "group1",
+ "device_ids": [25011],
+ "state": "absent"})
+ with pytest.raises(Exception) as exc:
+ self.module.validate_group(group_resp, f_module, "name", "group1")
+ assert exc.value.args[0] == REMOVE_STATIC_GROUP_MESSAGE
+
+ @pytest.mark.parametrize("inp,out", [(['192.168.2.0'], [IPAddress('192.168.2.0')]),
+ (['fe80::ffff:ffff:ffff:ffff'], [IPAddress('fe80::ffff:ffff:ffff:ffff')]),
+ (['192.168.2.0/24'], [IPNetwork('192.168.2.0/24')]),
+ (['fe80::ffff:ffff:ffff:1111-fe80::ffff:ffff:ffff:ffff'],
+ [IPRange('fe80::ffff:ffff:ffff:1111', 'fe80::ffff:ffff:ffff:ffff')]),
+ (['192.168.2.0', 'fe80::ffff:ffff:ffff:ffff',
+ '192.168.2.0/24', 'fe80::ffff:ffff:ffff:1111-fe80::ffff:ffff:ffff:ffff',
+ '2002:c000:02e6::1/48'], [IPAddress('192.168.2.0'),
+ IPAddress('fe80::ffff:ffff:ffff:ffff'),
+ IPNetwork('192.168.2.0/24'),
+ IPRange('fe80::ffff:ffff:ffff:1111',
+ 'fe80::ffff:ffff:ffff:ffff'),
+ IPNetwork(
+ '2002:c000:02e6::1/48')])])
+ def test_get_all_ips_success_case(self, inp, out):
+ f_module = self.get_module_mock(params={"name": "group1",
+ "ip_addresses": inp})
+ res = self.module.get_all_ips(inp, f_module)
+ assert res == out
+
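+    # Each malformed address below must raise INVALID_IP_FORMAT for the offending entry.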
+ @pytest.mark.parametrize("inp", [["abc"], [""], ["266.128"], ["100:1bcd:xyz"], ["192.168.0.0--192.168.0.1"],
+ ["-192.168.0.0-192.168.0.1"], ["-192.168.0.0192.168.0.1"],
+ ["192.168.0.0-192.168.0.1-"], ["192.168.0.0192.168.0.1-"],
+ ["192.168.0.1//24"],
+ ["\192.168.0.1//24"],
+ ["192.168.0.1/\24"],
+ ["/192.168.0.1/24"],
+ ["1.12.1.36/255.255.255.88"]],
+ ids=["abc", "", "266.128", "100:1bcd:xyz", "192.168.0.0--192.168.0.1",
+ "-192.168.0.0-192.168.0.1", "-192.168.0.0192.168.0.1", "192.168.0.0-192.168.0.1-",
+ "192.168.0.0192.168.0.1-", "192.168.0.1//24", "\192.168.0.1//24",
+ "192.168.0.1/\24", "/192.168.0.1/24", "1.12.1.36/255.255.255.88"])
+ def test_get_all_ips_failure_case(self, inp):
+ f_module = self.get_module_mock(params={"name": "group1",
+ "ip_addresses": inp})
+ with pytest.raises(Exception, match=INVALID_IP_FORMAT.format(inp[0])) as err:
+ self.module.get_all_ips(inp, f_module)
+
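+    # The inventory below mixes addresses inside and outside the requested networks, ranges, and single IPs;
+    # only the devices whose address matches should be returned.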
+ def test_get_device_id_from_ip_success_case(self):
+ device_list = [
+ {
+ "Id": 1111,
+ "Identifier": "device1",
+ "DeviceServiceTag": "device1",
+ "DeviceManagement": [
+ {
+ "NetworkAddress": "192.168.2.255",
+ }
+ ],
+ },
+ {
+ "Id": 2222,
+ "Identifier": "device2",
+ "DeviceServiceTag": "device2",
+ "DeviceManagement": [
+ {
+ "NetworkAddress": "192.168.4.10",
+ }
+ ],
+ },
+ {
+ "Id": 3333,
+ "Identifier": "device3",
+ "DeviceServiceTag": "device3",
+ "DeviceManagement": [
+ {
+ "NetworkAddress": "192.168.2.10",
+ }
+ ],
+ },
+ {
+ "Id": 4444,
+ "Identifier": "device4",
+ "DeviceServiceTag": "device4",
+ "DeviceManagement": [
+ {
+ "NetworkAddress": "192.168.3.10",
+ }
+ ],
+ },
+ {
+ "Id": 5555,
+ "Identifier": "device5",
+ "DeviceServiceTag": "device5",
+ "DeviceManagement": [
+ {
+ "NetworkAddress": "192.168.4.3",
+ }
+ ],
+ },
+ {
+ "Id": 6666,
+ "Identifier": "device6",
+ "DeviceServiceTag": "device6",
+ "DeviceManagement": [
+ {
+ "NetworkAddress": "192.168.3.11",
+ }
+ ],
+ },
+ {
+ "Id": 7777,
+ "Identifier": "device7",
+ "DeviceServiceTag": "device7",
+ "DeviceManagement": [
+ {
+ "NetworkAddress": "192.168.3.0",
+ }
+ ],
+ },
+ {
+ "Id": 8888,
+ "Identifier": "device8",
+ "DeviceServiceTag": "device8",
+ "DeviceManagement": [
+ {
+ "NetworkAddress": "192.168.4.1",
+ }
+ ],
+ },
+ {
+ "Id": 9999,
+ "Identifier": "device9",
+ "DeviceServiceTag": "device9",
+ "DeviceManagement": [
+ {
+ "NetworkAddress": "192.168.4.5",
+ }
+ ],
+ },
+ {
+ "Id": 1010,
+ "Identifier": "device10",
+ "DeviceServiceTag": "device10",
+ "DeviceManagement": [
+ {
+ "NetworkAddress": "192.168.4.9",
+ }
+ ],
+ },
+ {
+ "Id": 1011,
+ "Identifier": "device11",
+ "DeviceServiceTag": "device11",
+ "DeviceManagement": [
+ {
+ "NetworkAddress": "[fe80::de0:b6b3:a764:0]",
+ }
+ ],
+ },
+ {
+ "Id": 1012,
+ "Identifier": "device11",
+ "DeviceServiceTag": "device11",
+ "DeviceManagement": [
+ {
+ "NetworkAddress": "[fe90::de0:b6b3:a764:0]",
+ }
+ ],
+ }
+ ]
+ output = {3333: "192.168.2.10", 4444: "192.168.3.10",
+ 5555: "192.168.4.3", 6666: "192.168.3.11", 7777: "192.168.3.0",
+ 8888: "192.168.4.1", 9999: "192.168.4.5", 1010: "192.168.4.9",
+ 1011: "fe80::de0:b6b3:a764:0"}
+ ip_addresses = [IPNetwork("::ffff:192.168.2.0/125"), IPAddress("192.168.2.10"),
+ IPAddress('fe80::ffff:ffff:ffff:ffff'),
+ IPNetwork('fe80::ffff:ffff:ffff:ffff/24'),
+ IPNetwork('192.168.3.0/24'), IPRange('192.168.4.1', '192.168.4.9')]
+ f_module = self.get_module_mock(params={"name": "group1",
+ "ip_addresses": ["::ffff:192.168.2.0/125",
+ "192.168.2.10",
+ 'fe80::ffff:ffff:ffff:ffff',
+ '192.168.3.0/24',
+ '192.168.4.1-192.168.4.9',
+ 'fe80::ffff:ffff:ffff:ffff/24']})
+ res = self.module.get_device_id_from_ip(ip_addresses, device_list, f_module)
+ assert res == output
+
+ def test_get_device_id_from_ip_failure_case(self):
+ device_list = [
+ {
+ "Id": 1111,
+ "Identifier": "device1",
+ "DeviceServiceTag": "device1",
+ "DeviceManagement": [
+ {
+ "NetworkAddress": "192.168.2.255",
+ }
+ ],
+ },
+ ]
+ ip_addresses = [IPNetwork("::ffff:192.168.2.0/125"), IPAddress("192.168.2.10"),
+ IPAddress('fe80::ffff:ffff:ffff:ffff'),
+ IPNetwork('fe80::ffff:ffff:ffff:ffff/24'),
+ IPNetwork('192.168.3.0/24'), IPRange('192.168.4.1', '192.168.4.9')]
+ with pytest.raises(Exception, match=IP_NOT_EXISTS):
+ f_module = self.get_module_mock(params={"name": "group1",
+ "ip_addresses": ["::ffff:192.168.2.0/125",
+ "192.168.2.10",
+ 'fe80::ffff:ffff:ffff:ffff',
+ '192.168.3.0/24',
+ '192.168.4.1-192.168.4.9',
+ 'fe80::ffff:ffff:ffff:ffff/24']})
+ self.module.get_device_id_from_ip(ip_addresses, device_list, f_module)
+
+ # def test_add_member_to_group_case01(self, ome_connection_mock_for_device_group, ome_response_mock):
+ # report_list = [{"Id": 3333, "DeviceServiceTag": "device1",
+ # "DeviceManagement": [{"NetworkAddress": "192.168.2.10"},
+ # ]},
+ # {"Id": 1013, "DeviceServiceTag": "device1",
+ # "DeviceManagement": [{"NetworkAddress": "192.168.5.10"},
+ # ]}
+ # ]
+ # ome_connection_mock_for_device_group.get_all_report_details.return_value = {"report_list": report_list}
+ # f_module = self.get_module_mock(params={"name": "group1",
+ # "ip_addresses": ["::ffff:192.168.2.0/125",
+ # "192.168.2.10",
+ # 'fe80::ffff:ffff:ffff:ffff',
+ # '192.168.3.0/24',
+ # '192.168.4.1-192.168.4.9',
+ # 'fe80::ffff:ffff:ffff:ffff/24']})
+ # device_id = {3333: "192.168.2.10", 4444: "192.168.3.10",
+ # 5555: "192.168.4.3",
+ # 1011: "fe80::de0:b6b3:a764:0"}
+ # ome_response_mock.status_code = 204
+ # added_ips_out = ["192.168.3.10", "192.168.4.3", "fe80::de0:b6b3:a764:0"]
+ # resp, added_ips = self.module.add_member_to_group(f_module, ome_connection_mock_for_device_group, 1, device_id,
+ # "IPAddresses")
+ # assert resp.status_code == 204
+ # assert added_ips == added_ips_out
+
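+    # Check mode: not all device ids resolved from the IP addresses are current group members, so "Changes found" is expected.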
+ def test_add_member_to_group_checkmode_case01(self, ome_connection_mock_for_device_group, ome_response_mock):
+ report_list = [{"Id": 3333, "DeviceServiceTag": "device1",
+ "DeviceManagement": [{"NetworkAddress": "192.168.2.10"},
+ ]},
+ {"Id": 1013, "DeviceServiceTag": "device1",
+ "DeviceManagement": [{"NetworkAddress": "192.168.5.10"},
+ ]}
+ ]
+ ome_connection_mock_for_device_group.get_all_report_details.return_value = {"report_list": report_list}
+ f_module = self.get_module_mock(params={"name": "group1",
+ "ip_addresses": ["::ffff:192.168.2.0/125",
+ "192.168.2.10",
+ 'fe80::ffff:ffff:ffff:ffff',
+ '192.168.3.0/24',
+ '192.168.4.1-192.168.4.9',
+ 'fe80::ffff:ffff:ffff:ffff/24']}, check_mode=True)
+ device_id = {3333: "192.168.2.10", 4444: "192.168.3.10",
+ 5555: "192.168.4.3",
+ 1011: "fe80::de0:b6b3:a764:0"}
+ with pytest.raises(Exception, match="Changes found to be applied."):
+ self.module.add_member_to_group(f_module, ome_connection_mock_for_device_group, 1, device_id, "IPAddresses")
+
+ def test_add_member_to_group_checkmode_case02(self, ome_connection_mock_for_device_group, ome_response_mock):
+ report_list = [{"Id": 3333, "DeviceServiceTag": "device1",
+ "DeviceManagement": [{"NetworkAddress": "192.168.2.10"},
+ ]},
+ {"Id": 1013, "DeviceServiceTag": "device1",
+ "DeviceManagement": [{"NetworkAddress": "192.168.5.10"},
+ ]}
+ ]
+ ome_connection_mock_for_device_group.get_all_report_details.return_value = {"report_list": report_list}
+ f_module = self.get_module_mock(params={"name": "group1",
+ "ip_addresses": ["192.168.2.10"]}, check_mode=True)
+ device_id = {3333: "192.168.2.10"}
+ with pytest.raises(Exception, match="No changes found to be applied."):
+ self.module.add_member_to_group(f_module, ome_connection_mock_for_device_group, 1, device_id, "IPAddresses")
+
+ def test_add_member_to_group_idempotency_case(self, ome_connection_mock_for_device_group, ome_response_mock):
+ report_list = [{"Id": 3333, "DeviceServiceTag": "device1",
+ "DeviceManagement": [{"NetworkAddress": "192.168.2.10"},
+ ]},
+ {"Id": 1013, "DeviceServiceTag": "device1",
+ "DeviceManagement": [{"NetworkAddress": "192.168.5.10"},
+ ]}
+ ]
+ ome_connection_mock_for_device_group.get_all_report_details.return_value = {"report_list": report_list}
+ f_module = self.get_module_mock(params={"name": "group1",
+ "ip_addresses": ["192.168.2.10"]})
+ device_id = {3333: "192.168.2.10"}
+ with pytest.raises(Exception) as exc:
+ self.module.add_member_to_group(f_module, ome_connection_mock_for_device_group, 1, device_id, "IPAddresses")
+
+ assert exc.value.args[0] == "No changes found to be applied."
+
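+    # End-to-end run with ip_addresses: the module reports success and echoes the IP addresses that were added.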
+ def test_ome_device_group_main_ip_address_case(self, ome_connection_mock_for_device_group, mocker,
+ ome_response_mock, ome_default_args):
+ ome_default_args.update({"name": "Storage Services", "ip_addresses": ["192.168.2.10"]})
+ ome_response_mock.status_code = 204
+ ome_response_mock.success = True
+ mocker.patch(MODULE_PATH + 'get_group_id', return_value=1)
+ mocker.patch(MODULE_PATH + 'get_device_id', return_value=[25011, 25012])
+ mocker.patch(MODULE_PATH + 'add_member_to_group', return_value=(ome_response_mock, ["192.168.2.10"]))
+ result = self._run_module(ome_default_args)
+ assert result['msg'] == "Successfully added member(s) to the device group."
+ assert result['ip_addresses_added'] == ["192.168.2.10"]
+
+ def test_get_device_id_ip_address_case(self, ome_connection_mock_for_device_group, mocker):
+ f_module = self.get_module_mock(params={"name": "group1",
+ "ip_addresses": ["192.168.2.10"]})
+ mocker.patch(MODULE_PATH + 'get_all_ips', return_value=[IPAddress("192.168.2.10")])
+ mocker.patch(MODULE_PATH + 'get_device_id_from_ip', return_value={1111: "192.168.2.10"})
+ each_device_list, key = self.module.get_device_id(ome_connection_mock_for_device_group, f_module)
+ assert key == "IPAddresses"
+ assert each_device_list == {1111: "192.168.2.10"}
+
+ def test_get_current_member_of_group(self, ome_connection_mock_for_device_group, ome_response_mock):
+ report_list = [{"Id": 3333, "DeviceServiceTag": "device1",
+ "DeviceManagement": [{"NetworkAddress": "192.168.2.10"},
+ ]},
+ {"Id": 1013, "DeviceServiceTag": "device1",
+ "DeviceManagement": [{"NetworkAddress": "192.168.5.10"},
+ ]}
+ ]
+ ome_connection_mock_for_device_group.get_all_report_details.return_value = {"report_list": report_list}
+ group_id = 1011
+ device_id_list = self.module.get_current_member_of_group(ome_connection_mock_for_device_group, group_id)
+ assert device_id_list == [3333, 1013]
+
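+    # remove_member_from_group: successful removal, check-mode prediction, and no-op paths are exercised below.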
+ def test_ome_device_group_remove_member_from_group(self, ome_connection_mock_for_device_group, ome_response_mock):
+ report_list = [{"Id": 25011, "DeviceServiceTag": "SEFRG2"}]
+ ome_connection_mock_for_device_group.get_all_report_details.return_value = {"report_list": report_list}
+ f_module = self.get_module_mock(params={"name": "Storage Services",
+ "device_ids": [25011],
+ "state": "absent"})
+ group_id = 1011
+ device_ids = [25011]
+ current_device_list = [25011]
+ ome_response_mock.status_code = 204
+ ome_response_mock.success = True
+ resp = self.module.remove_member_from_group(f_module, ome_connection_mock_for_device_group,
+ group_id, device_ids, current_device_list)
+ assert resp.status_code == 204
+
+ f_module.check_mode = True
+ with pytest.raises(Exception, match="Changes found to be applied.") as exc:
+ self.module.remove_member_from_group(f_module, ome_connection_mock_for_device_group,
+ group_id, device_ids, current_device_list)
+
+ f_module.check_mode = False
+ report_list = [{"Id": 25013, "DeviceServiceTag": "SEFRG4"}, {"Id": 25014, "DeviceServiceTag": "SEFRG5"}]
+ device_ids = [10000, 24000, 25013, 12345, 25014]
+ current_device_list = [25013, 25014]
+ ome_connection_mock_for_device_group.get_all_report_details.return_value = {"report_list": report_list}
+ resp = self.module.remove_member_from_group(f_module, ome_connection_mock_for_device_group,
+ group_id, device_ids, current_device_list)
+ assert resp.status_code == 204
+
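+        # Check mode with no overlap between the requested ids and the current members is a no-op.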
+ current_device_list = [25013, 25014]
+ device_ids = [25011]
+ f_module.check_mode = True
+ with pytest.raises(Exception, match="No changes found to be applied.") as exc:
+ self.module.remove_member_from_group(f_module, ome_connection_mock_for_device_group,
+ group_id, device_ids, current_device_list)
+
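+        # An empty device_ids list is also a no-op, even outside check mode.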
+ current_device_list = [25013, 25014]
+ f_module.check_mode = False
+ device_ids = []
+ with pytest.raises(Exception, match="No changes found to be applied.") as exc:
+ self.module.remove_member_from_group(f_module, ome_connection_mock_for_device_group,
+ group_id, device_ids, current_device_list)
+
+ def test_ome_device_group_main_absent_case(self, ome_connection_mock_for_device_group, mocker,
+ ome_response_mock, ome_default_args):
+ ome_default_args.update({"name": "Storage Services", "device_ids": [25011, 25012], "state": "absent"})
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ mocker.patch(MODULE_PATH + 'get_group_id', return_value=1)
+ mocker.patch(MODULE_PATH + 'get_device_id', return_value=[25011, 25012])
+ mocker.patch(MODULE_PATH + 'get_current_member_of_group', return_value=[25011, 25012])
+ mocker.patch(MODULE_PATH + 'remove_member_from_group', return_value=(ome_response_mock))
+ result = self._run_module(ome_default_args)
+ assert result['msg'] == "Successfully removed member(s) from the device group."
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_info.py
new file mode 100644
index 00000000..bb41b51a
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_info.py
@@ -0,0 +1,281 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.1.0
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible.module_utils.six.moves.urllib.error import HTTPError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+resource_basic_inventory = {"basic_inventory": "DeviceService/Devices"}
+resource_detailed_inventory = {"detailed_inventory:": {"device_id": {Constants.device_id1: None},
+ "device_service_tag": {
+ Constants.device_id2: Constants.service_tag1}}}
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestOmeDeviceInfo(FakeAnsibleModule):
+ module = ome_device_info
+
+ @pytest.fixture
+ def validate_device_inputs_mock(self, mocker):
+ validate_device_inputs_mock = mocker.patch(MODULE_PATH + 'ome_device_info._validate_inputs')
+ validate_device_inputs_mock.return_value = None
+
+ @pytest.fixture
+ def get_device_resource_parameters_mock(self, mocker):
+ response_class_mock = mocker.patch(MODULE_PATH + 'ome_device_info._get_resource_parameters',
+ return_value=resource_basic_inventory)
+ return response_class_mock
+
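+    # Basic inventory: the report list from get_all_report_details is exposed as device_info["value"].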
+ def test_main_basic_inventory_success_case(self, ome_default_args, module_mock, validate_device_inputs_mock,
+ ome_connection_mock,
+ get_device_resource_parameters_mock, ome_response_mock):
+ ome_response_mock.json_data = {"@odata.context": "/api/$metadata#Collection(DeviceService.Device)",
+ "@odata.count": 1}
+ increment_device_details = {"resp_obj": ome_response_mock,
+ "report_list": [{"DeviceServiceTag": Constants.service_tag1,
+ "Id": Constants.device_id1}]}
+ ome_connection_mock.get_all_report_details.return_value = increment_device_details
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'device_info' in result
+ assert result["device_info"] == {"@odata.context": "/api/$metadata#Collection(DeviceService.Device)",
+ "@odata.count": 1,
+ "value": [{"DeviceServiceTag": Constants.service_tag1,
+ "Id": Constants.device_id1}]}
+
+ def test_main_basic_inventory_query_param_success_case(self, mocker, ome_default_args, module_mock,
+ validate_device_inputs_mock, ome_connection_mock,
+ get_device_resource_parameters_mock, ome_response_mock):
+ quer_param_mock = mocker.patch(MODULE_PATH + 'ome_device_info._get_query_parameters')
+ quer_param_mock.return_value = {"filter": "Type eq '1000'"}
+ ome_response_mock.json_data = {"value": [{"device_id1": "details", "device_id2": "details"}]}
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'device_info' in result
+ assert result["device_info"] == {"value": [{"device_id1": "details", "device_id2": "details"}]}
+
+ def test_main_basic_inventory_failure_case(self, ome_default_args, module_mock, validate_device_inputs_mock,
+ ome_connection_mock,
+ get_device_resource_parameters_mock, ome_response_mock):
+ ome_response_mock.status_code = 500
+ ome_response_mock.json_data = {"@odata.context": "/api/$metadata#Collection(DeviceService.Device)",
+ "@odata.count": 0}
+ ome_connection_mock.get_all_report_details.return_value = {"resp_obj": ome_response_mock, "report_list": []}
+ result = self._run_module(ome_default_args)
+ assert result['msg'] == 'No devices present.'
+
+ def test_main_detailed_inventory_success_case(self, ome_default_args, module_mock, validate_device_inputs_mock,
+ ome_connection_mock,
+ get_device_resource_parameters_mock, ome_response_mock):
+ ome_default_args.update(
+ {"fact_subset": "detailed_inventory", "system_query_options": {"device_id": [Constants.device_id1],
+ "device_service_tag": [
+ Constants.service_tag1]}})
+ detailed_inventory = {"detailed_inventory:": {
+ "device_id": {Constants.device_id1: "DeviceService/Devices(Constants.device_id1)/InventoryDetails"},
+ "device_service_tag": {Constants.service_tag1: "DeviceService/Devices(4321)/InventoryDetails"}}}
+ get_device_resource_parameters_mock.return_value = detailed_inventory
+ ome_response_mock.json_data = {
+ "value": [{"device_id": {"1234": "details"}}, {"device_service_tag": {Constants.service_tag1: "details"}}]}
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'device_info' in result
+
+ def test_main_detailed_inventory_http_error_case(self, ome_default_args, module_mock, validate_device_inputs_mock,
+ ome_connection_mock,
+ get_device_resource_parameters_mock, ome_response_mock):
+ ome_default_args.update(
+ {"fact_subset": "detailed_inventory", "system_query_options": {"device_id": [Constants.device_id1],
+ "device_service_tag": [
+ Constants.service_tag1]}})
+ detailed_inventory = {"detailed_inventory:": {
+ "device_id": {Constants.device_id1: "DeviceService/Devices(Constants.device_id1)/InventoryDetails"},
+ "device_service_tag": {Constants.service_tag1: "DeviceService/Devices(4321)/InventoryDetails"}}}
+ get_device_resource_parameters_mock.return_value = detailed_inventory
+ ome_connection_mock.invoke_request.side_effect = HTTPError('http://testhost.com', 400, '', {}, None)
+ result = self._run_module(ome_default_args)
+ assert 'device_info' in result
+
+ def test_main_HTTPError_error_case(self, ome_default_args, module_mock, validate_device_inputs_mock,
+ ome_connection_mock,
+ get_device_resource_parameters_mock, ome_response_mock):
+ ome_connection_mock.invoke_request.side_effect = HTTPError('http://testhost.com', 400, '', {}, None)
+ ome_response_mock.json_data = {"value": [{"device_id1": "details", "device_id2": "details"}]}
+ ome_response_mock.status_code = 400
+ result = self._run_module(ome_default_args)
+ assert 'device_info' in result
+
+ @pytest.mark.parametrize("fact_subset, mutually_exclusive_call",
+ [("basic_inventory", False), ("detailed_inventory", True)])
+ def test_validate_inputs(self, fact_subset, mutually_exclusive_call, mocker):
+ module_params = {"fact_subset": fact_subset}
+ check_mutually_inclusive_arguments_mock = mocker.patch(MODULE_PATH +
+ 'ome_device_info._check_mutually_inclusive_arguments')
+ check_mutually_inclusive_arguments_mock.return_value = None
+ self.module._validate_inputs(module_params)
+ if mutually_exclusive_call:
+ check_mutually_inclusive_arguments_mock.assert_called()
+ else:
+ check_mutually_inclusive_arguments_mock.assert_not_called()
+ check_mutually_inclusive_arguments_mock.reset_mock()
+
+ system_query_options_params = [{"system_query_options": None}, {"system_query_options": {"device_id": None}},
+ {"system_query_options": {"device_service_tag": None}}]
+
+ @pytest.mark.parametrize("system_query_options_params", system_query_options_params)
+ def test_check_mutually_inclusive_arguments(self, system_query_options_params):
+ module_params = {"fact_subset": "subsystem_health"}
+ required_args = ["device_id", "device_service_tag"]
+ module_params.update(system_query_options_params)
+ with pytest.raises(ValueError) as ex:
+ self.module._check_mutually_inclusive_arguments(module_params["fact_subset"], module_params,
+ ["device_id", "device_service_tag"])
+ assert "One of the following {0} is required for {1}".format(required_args,
+ module_params["fact_subset"]) == str(ex.value)
+
+ params = [{"fact_subset": "basic_inventory", "system_query_options": {"device_id": [Constants.device_id1]}},
+ {"fact_subset": "subsystem_health",
+ "system_query_options": {"device_service_tag": [Constants.service_tag1]}},
+ {"fact_subset": "detailed_inventory",
+ "system_query_options": {"device_id": [Constants.device_id1], "inventory_type": "serverDeviceCards"}}]
+
+ @pytest.mark.parametrize("module_params", params)
+ def test_get_resource_parameters(self, module_params, ome_connection_mock):
+ self.module._get_resource_parameters(module_params, ome_connection_mock)
+
+ @pytest.mark.parametrize("module_params,data", [({"system_query_options": None}, None),
+ ({"system_query_options": {"fileter": None}}, None),
+ ({"system_query_options": {"filter": "abc"}}, "$filter")])
+ def test_get_query_parameters(self, module_params, data):
+ res = self.module._get_query_parameters(module_params)
+ if data is not None:
+ assert data in res
+ else:
+ assert res is None
+
+ @pytest.mark.parametrize("module_params", params)
+ def test_get_device_identifier_map(self, module_params, ome_connection_mock, mocker):
+ get_device_id_from_service_tags_mock = mocker.patch(MODULE_PATH +
+ 'ome_device_info._get_device_id_from_service_tags')
+ get_device_id_from_service_tags_mock.return_value = None
+ res = self.module._get_device_identifier_map(module_params, ome_connection_mock)
+ assert isinstance(res, dict)
+
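+    # A service tag that maps to an already-requested device id is flagged in device_fact_error_report.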
+ def test_check_duplicate_device_id(self):
+ self.module._check_duplicate_device_id([Constants.device_id1],
+ {Constants.device_id1: Constants.service_tag1})
+ assert self.module.device_fact_error_report[Constants.service_tag1] == "Duplicate report of device_id: 1234"
+
+ @pytest.mark.parametrize("val,expected_res", [(123, True), ("abc", False)])
+ def test_is_int(self, val, expected_res):
+ actual_res = self.module.is_int(val)
+ assert actual_res == expected_res
+
+ def test_get_device_id_from_service_tags(self, ome_connection_mock, ome_response_mock, mocker):
+ mocker.patch(MODULE_PATH + 'ome_device_info.update_device_details_with_filtering')
+ ome_response_mock.json_data.update({"@odata.context": "/api/$metadata#Collection(DeviceService.Device)"})
+ ome_response_mock.json_data.update({"@odata.count": 1})
+ ome_connection_mock.get_all_report_details.return_value = {"resp_obj": ome_response_mock, "report_list": [
+ {"DeviceServiceTag": Constants.service_tag1,
+ "Id": Constants.device_id1}]}
+ self.module._get_device_id_from_service_tags([Constants.service_tag1, "INVALID"], ome_connection_mock)
+
+ def test_get_device_id_from_service_tags_error_case(self, ome_connection_mock, ome_response_mock):
+ ome_connection_mock.get_all_report_details.side_effect = HTTPError('http://testhost.com', 400, '', {}, None)
+ with pytest.raises(HTTPError) as ex:
+ self.module._get_device_id_from_service_tags(["INVALID"], ome_connection_mock)
+
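+    # A service tag initially marked unavailable but present in the filtered response is moved into service_tag_dict.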
+ def test_update_device_details_with_filtering_success_case_01(self, ome_connection_mock, ome_response_mock):
+ non_available_tags = [Constants.service_tag2]
+ service_tag_dict = {Constants.device_id1: Constants.service_tag1}
+ ome_response_mock.json_data = {
+ "value": [{"DeviceServiceTag": Constants.service_tag2, "Id": Constants.device_id2}]}
+ self.module.update_device_details_with_filtering(non_available_tags, service_tag_dict, ome_connection_mock)
+ assert service_tag_dict[Constants.device_id1] == Constants.service_tag1
+ assert service_tag_dict[Constants.device_id2] == Constants.service_tag2
+ assert len(non_available_tags) == 0
+
+ def test_update_device_details_with_filtering_success_case_02(self, ome_connection_mock, ome_response_mock):
+ non_available_tags = ["MX700"]
+ service_tag_dict = {Constants.device_id1: Constants.service_tag1}
+ ome_response_mock.json_data = {"value": [{"DeviceServiceTag": "MX7000", "Id": Constants.device_id2}]}
+ self.module.update_device_details_with_filtering(non_available_tags, service_tag_dict, ome_connection_mock)
+ assert service_tag_dict[Constants.device_id1] == Constants.service_tag1
+ assert Constants.device_id2 not in service_tag_dict
+ assert len(non_available_tags) == 1
+
+ def test_update_device_details_with_filtering_failure_case_01(self, ome_connection_mock, ome_response_mock):
+ error_msg = '400: Bad Request'
+ service_tag_dict = {}
+ non_available_tags = [Constants.service_tag2]
+ ome_connection_mock.invoke_request.side_effect = HTTPError('http://testhost.com', 400, error_msg, {}, None)
+ with pytest.raises(HTTPError, match=error_msg) as ex:
+ self.module.update_device_details_with_filtering(non_available_tags, service_tag_dict, ome_connection_mock)
+
+ def test_main_detailed_inventory_device_fact_error_report_case_01(self, ome_default_args, module_mock,
+ validate_device_inputs_mock, ome_connection_mock,
+ get_device_resource_parameters_mock,
+ ome_response_mock):
+ ome_default_args.update(
+ {"fact_subset": "detailed_inventory", "system_query_options": {"device_id": [Constants.device_id1],
+ "device_service_tag": [
+ Constants.service_tag1]}})
+ detailed_inventory = {
+ "detailed_inventory:": {
+ "device_id": {
+                    Constants.device_id1: "DeviceService/Devices({0})/InventoryDetails".format(Constants.device_id1)
+ },
+ "device_service_tag": {
+ Constants.service_tag1: "DeviceService/Devices(4321)/InventoryDetails"
+ }
+ }
+ }
+ get_device_resource_parameters_mock.return_value = detailed_inventory
+ ome_response_mock.json_data = {"value": [{"device_id": {Constants.device_id1: "details"}},
+ {"device_service_tag": {Constants.service_tag1: "details"}}]}
+ ome_response_mock.status_code = 200
+ self.module.device_fact_error_report = {
+ Constants.service_tag1: "Duplicate report of device_id: {0}".format(Constants.device_id1)}
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'device_info' in result
+
+ def test_main_detailed_inventory_device_fact_error_report_case_02(self, ome_default_args, module_mock,
+ validate_device_inputs_mock,
+ ome_connection_mock,
+ get_device_resource_parameters_mock,
+ ome_response_mock):
+ ome_default_args.update(
+ {"fact_subset": "detailed_inventory", "system_query_options": {"device_id": [Constants.device_id1],
+ "device_service_tag": [
+ Constants.service_tag1]}})
+ detailed_inventory = {
+ "device_service_tag": {
+ Constants.service_tag1: "DeviceService/Devices(4321)/InventoryDetails"
+ }
+ }
+ get_device_resource_parameters_mock.return_value = detailed_inventory
+ ome_response_mock.json_data = {"value": [{"device_id": {Constants.device_id1: "details"}},
+ {"device_service_tag": {Constants.service_tag1: "details"}}]}
+ ome_response_mock.status_code = 200
+ self.module.device_fact_error_report = {
+ Constants.service_tag1: "Duplicate report of device_id: {0}".format(Constants.device_id1)}
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'device_info' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py
new file mode 100644
index 00000000..23bae781
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import pytest
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_local_access_configuration
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_local_access_configuration.'
+
+
+@pytest.fixture
+def ome_conn_mock_lac(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOMEMDeviceLocalAccessConfiguration(FakeAnsibleModule):
+
+ module = ome_device_local_access_configuration
+
+ def test_check_domain_service(self, ome_conn_mock_lac, ome_default_args):
+ f_module = self.get_module_mock()
+ result = self.module.check_domain_service(f_module, ome_conn_mock_lac)
+ assert result is None
+
+ def test_get_chassis_device(self, ome_conn_mock_lac, ome_default_args, mocker, ome_response_mock):
+ mocker.patch(MODULE_PATH + "get_ip_from_host", return_value="192.18.1.1")
+ ome_response_mock.json_data = {"value": [{"DeviceId": 25011, "DomainRoleTypeValue": "LEAD",
+ "PublicAddress": ["192.168.1.1"]},
+ {"DeviceId": 25012, "DomainRoleTypeValue": "STANDALONE",
+ "PublicAddress": ["192.168.1.2"]}]}
+ param = {"device_id": 25012, "hostname": "192.168.1.6", "enable_kvm_access": True}
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as err:
+ self.module.get_chassis_device(f_module, ome_conn_mock_lac)
+ assert err.value.args[0] == "Unable to retrieve the device information."
+
+ def test_get_ip_from_host(self, ome_conn_mock_lac, ome_default_args, ome_response_mock):
+ result = self.module.get_ip_from_host("192.168.0.1")
+ assert result == "192.168.0.1"
+
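+    # get_device_details: an id missing from the inventory fails, a known id returns the current settings,
+    # and omitting device_id falls back to the chassis lookup.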
+ def test_get_device_details(self, ome_conn_mock_lac, ome_default_args, ome_response_mock, mocker):
+ param = {"device_id": 25012, "hostname": "192.168.1.6", "enable_kvm_access": True}
+ f_module = self.get_module_mock(params=param)
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {
+ "value": [], "SettingType": "LocalAccessConfiguration", "EnableChassisDirect": False,
+ "EnableChassisPowerButton": False, "EnableKvmAccess": True, "EnableLcdOverridePin": False,
+ "LcdAccess": "VIEW_ONLY", "LcdCustomString": "LCD Text", "LcdLanguage": "en", }
+ with pytest.raises(Exception) as err:
+ self.module.get_device_details(ome_conn_mock_lac, f_module)
+ assert err.value.args[0] == "Unable to complete the operation because the entered target " \
+ "device id '25012' is invalid."
+ param = {"device_id": 25012, "hostname": "192.168.1.6", "enable_kvm_access": True}
+ f_module = self.get_module_mock(params=param)
+ ome_response_mock.json_data = {"value": [{"Id": 25012, "DeviceServiceTag": "GHRT2RL"}], "EnableKvmAccess": True}
+ mocker.patch(MODULE_PATH + 'check_mode_validation', return_value={"EnableKvmAccess": True})
+ resp = self.module.get_device_details(ome_conn_mock_lac, f_module)
+ assert resp.json_data["EnableKvmAccess"] is True
+ param = {"hostname": "192.168.1.6", "enable_kvm_access": True}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + 'get_chassis_device', return_value=("Id", 25011))
+ resp = self.module.get_device_details(ome_conn_mock_lac, f_module)
+ assert resp.json_data["EnableKvmAccess"] is True
+
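+    # check_mode_validation: identical settings are a no-op in both modes; a differing value raises
+    # "Changes found" in check mode and is returned in the payload otherwise.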
+ def test_check_mode_validation(self, ome_conn_mock_lac, ome_default_args, ome_response_mock, mocker):
+ loc_data = {"EnableKvmAccess": True, "EnableChassisDirect": True, "EnableChassisPowerButton": True,
+ "EnableLcdOverridePin": True, "LcdAccess": True, "LcdCustomString": "LCD Text",
+ "LcdLanguage": "en", "LcdOverridePin": 123456, "LcdPresence": "Present",
+ "QuickSync": {"QuickSyncAccess": True, "TimeoutLimit": 10, "EnableInactivityTimeout": True,
+ "TimeoutLimitUnit": "MINUTES", "EnableReadAuthentication": True,
+ "EnableQuickSyncWifi": True, "QuickSyncHardware": "Present"}, }
+ param = {"device_id": 25012, "hostname": "192.168.1.6", "enable_kvm_access": True}
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, loc_data)
+ assert err.value.args[0] == "No changes found to be applied."
+ f_module.check_mode = True
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, loc_data)
+ assert err.value.args[0] == "No changes found to be applied."
+ param = {"device_id": 25012, "hostname": "192.168.1.6", "enable_kvm_access": False}
+ f_module = self.get_module_mock(params=param)
+ f_module.check_mode = True
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, loc_data)
+ assert err.value.args[0] == "Changes found to be applied."
+ f_module.check_mode = False
+ result = self.module.check_mode_validation(f_module, loc_data)
+ assert result["EnableKvmAccess"] is False
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+    def test_ome_device_local_access_main_exception_case(self, exc_type, mocker, ome_default_args,
+                                                          ome_conn_mock_lac, ome_response_mock):
+ ome_default_args.update({"device_id": 25011, "enable_kvm_access": True})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py
new file mode 100644
index 00000000..8133e016
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py
@@ -0,0 +1,130 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 4.3.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import pytest
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_location
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_location.'
+
+
+@pytest.fixture
+def ome_conn_mock_location(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOMEMDeviceLocation(FakeAnsibleModule):
+
+ module = ome_device_location
+
+ def test_check_domain_service(self, ome_conn_mock_location, ome_default_args, mocker):
+ f_module = self.get_module_mock()
+ result = self.module.check_domain_service(f_module, ome_conn_mock_location)
+ assert result is None
+
+ def test_standalone_chassis(self, ome_conn_mock_location, ome_default_args, mocker, ome_response_mock):
+ mocker.patch(MODULE_PATH + "get_ip_from_host", return_value="192.18.1.1")
+ ome_response_mock.json_data = {"value": [{"DeviceId": 25011, "DomainRoleTypeValue": "LEAD",
+ "PublicAddress": ["192.168.1.1"]},
+ {"DeviceId": 25012, "DomainRoleTypeValue": "STANDALONE",
+ "PublicAddress": ["192.168.1.2"]}]}
+
+ param = {"data_center": "data center 1", "rack_slot": 2, "device_id": 25012, "hostname": "192.168.1.6",
+ "room": "room 1", "aisle": "aisle 1", "rack": "rack 1", "location": "location 1"}
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as err:
+ self.module.standalone_chassis(f_module, ome_conn_mock_location)
+ assert err.value.args[0] == "Failed to fetch the device information."
+
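+    # validate_dictionary: a differing RackSlot raises "Changes found" in check mode, a matching payload is
+    # reported as "No changes found", and a normal run with a difference returns the settings payload to apply.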
+ def test_validate_dictionary(self, ome_conn_mock_location, ome_default_args, mocker):
+ param = {"data_center": "data center 1", "rack_slot": 2,
+ "room": "room 1", "aisle": "aisle 1", "rack": "rack 1", "location": "location 1"}
+ f_module = self.get_module_mock(params=param)
+ f_module.check_mode = True
+ loc_resp = {"DataCenter": "data center 1", "RackSlot": 2, "Room": "room 1",
+ "Aisle": "aisle 1", "RackName": "rack 1", "Location": "location 1"}
+ with pytest.raises(Exception) as err:
+ self.module.validate_dictionary(f_module, loc_resp)
+ loc_resp = {"DataCenter": "data center 1", "RackSlot": 3, "Room": "room 1",
+ "Aisle": "aisle 1", "RackName": "rack 1", "Location": "location 1"}
+ with pytest.raises(Exception) as err:
+ self.module.validate_dictionary(f_module, loc_resp)
+ assert err.value.args[0] == "Changes found to be applied."
+ loc_resp = {"DataCenter": "data center 1", "RackSlot": 2, "Room": "room 1",
+ "Aisle": "aisle 1", "RackName": "rack 1", "Location": "location 1"}
+ f_module.check_mode = False
+ with pytest.raises(Exception) as err:
+ self.module.validate_dictionary(f_module, loc_resp)
+ assert err.value.args[0] == "No changes found to be applied."
+ loc_resp = {"DataCenter": "data center 1", "RackSlot": 3, "Room": "room 1",
+ "Aisle": "aisle 1", "RackName": "rack 1", "Location": "location 1"}
+ result = self.module.validate_dictionary(f_module, loc_resp)
+ assert result == {"DataCenter": "data center 1", "RackSlot": 2,
+ "Room": "room 1", "Aisle": "aisle 1", "RackName": "rack 1",
+ "Location": "location 1", "SettingType": "Location"}
+
+ def test_device_validation(self, ome_conn_mock_location, ome_default_args, mocker, ome_response_mock):
+ mocker.patch(MODULE_PATH + "validate_dictionary",
+ return_value={"DataCenter": "data center 1", "RackSlot": 2, "Room": "room 1",
+ "Aisle": "aisle 1", "RackName": "rack 1", "Location": "location 1",
+ "SettingType": "Location"})
+ param = {"data_center": "data center 1", "rack_slot": 2, "device_id": 25012,
+ "room": "room 1", "aisle": "aisle 1", "rack": "rack 1", "location": "location 1"}
+ ome_default_args.update(param)
+ f_module = self.get_module_mock(params=param)
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {
+ "value": [], "DataCenter": "data center 1",
+ "RackSlot": 3, "Room": "room 1", "Aisle": "aisle 1", "RackName": "rack 1",
+ "Location": "location 1", "SettingType": "Location", "result": {"RackSlot": 4}}
+ with pytest.raises(Exception) as err:
+ self.module.device_validation(f_module, ome_conn_mock_location)
+ assert err.value.args[0] == "Unable to complete the operation because the entered target " \
+ "device id '25012' is invalid."
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_device_location_main_exception_case(self, exc_type, mocker, ome_default_args,
+ ome_conn_mock_location, ome_response_mock):
+ ome_default_args.update({"device_id": 25011, "data_center": "data center 1",
+ "room": "room 1", "aisle": "aisle 1", "rack": "rack 1",
+ "rack_slot": "2", "location": "location 1"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py
new file mode 100644
index 00000000..69206143
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py
@@ -0,0 +1,408 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 4.2.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_mgmt_network
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_mgmt_network.'
+
+DEVICE_NOT_FOUND = "Device with {0} '{1}' not found."
+NON_CONFIG_NETWORK = "Network settings for {0} is not configurable."
+SUCCESS_MSG = "Successfully applied the network settings."
+INVALID_IP = "Invalid {0} address provided for the {1}"
+DNS_SETT_ERR1 = "'SecondaryDNS' requires 'PrimaryDNS' to be provided."
+DNS_SETT_ERR2 = "'TertiaryDNS' requires both 'PrimaryDNS' and 'SecondaryDNS' to be provided."
+NO_CHANGES_MSG = "No changes found to be applied."
+CHANGES_FOUND = "Changes found to be applied."
+SERVER = 1000
+CHASSIS = 2000
+IO_MODULE = 4000
+
+
+@pytest.fixture
+def ome_connection_mock_for_device_network(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeDeviceMgmtNetwork(FakeAnsibleModule):
+ module = ome_device_mgmt_network
+ dns_configuration = {"dns_domain_name": "localdomain", "dns_name": "openmanage-enterprise",
+ "register_with_dns": False, "auto_negotiation": False,
+ "network_speed": "10_MB", "use_dhcp_for_dns_domain_name": False}
+ ipv4_configuration = {"enable_ipv4": True, "enable_dhcp": False, "use_dhcp_to_obtain_dns_server_address": False,
+ "static_ip_address": "192.168.11.20", "static_subnet_mask": "255.255.255.0",
+ "static_gateway": "192.168.11.1", "static_preferred_dns_server": "192.168.11.2",
+ "static_alternate_dns_server": "192.168.11.3"}
+ ipv6_configuration = {"enable_ipv6": True, "enable_auto_configuration": False,
+ "static_alternate_dns_server": "2607:f2b1:f081:9:1c8c:f1c7:47e:f121",
+ "static_gateway": "0000::ffff",
+ "static_ip_address": "2607:f2b1:f081:9:1c8c:f1c7:47e:f120",
+ "static_preferred_dns_server": "2607:f2b1:f081:9:1c8c:f1c7:47e:f122",
+ "static_prefix_length": 0, "use_dhcpv6_to_obtain_dns_server_address": False}
+ dns_server_settings = {"preferred_dns_server": "192.96.20.181", "alternate_dns_server1": "192.96.20.182"}
+ management_vlan = {"enable_vlan": True, "vlan_id": 0}
+ inp_param = {
+ "hostname": "192.1.2.3",
+ "password": "password",
+ "port": 443,
+ "username": "root",
+ "device_service_tag": Constants.service_tag1,
+ "delay": 10,
+ "dns_configuration": dns_configuration,
+ "ipv4_configuration": ipv4_configuration,
+ "ipv6_configuration": ipv6_configuration,
+ "management_vlan": management_vlan,
+ "dns_server_settings": dns_server_settings
+ }
+ chassis = {
+ "SettingType": "Network",
+ "MgmtVLANId": "1",
+ "EnableVLAN": True,
+ "Ipv4Settings": {
+ "EnableIPv4": True,
+ "EnableDHCP": False,
+ "StaticIPAddress": "192.196.24.176",
+ "StaticSubnetMask": "255.255.254.0",
+ "StaticGateway": "192.196.24.1",
+ "UseDHCPObtainDNSServerAddresses": False,
+ "StaticPreferredDNSServer": "",
+ "StaticAlternateDNSServer": ""
+ },
+ "Ipv6Settings": {
+ "EnableIPv6": False,
+ "EnableAutoconfiguration": False,
+ "StaticIPv6Address": "",
+ "StaticPrefixLength": "0",
+ "StaticGateway": "",
+ "UseDHCPv6ObtainDNSServerAddresses": False,
+ "StaticPreferredDNSServer": "",
+ "StaticAlternateDNSServer": ""
+ },
+ "GeneralSettings": {
+ "EnableNIC": True,
+ "RegisterDNS": False,
+ "DnsName": "MX-6H5S6Z2",
+ "UseDHCPForDomainName": False,
+ "DnsDomainName": "",
+ "AutoNegotiation": True,
+ "NetworkSpeed": "1_GB",
+ "Delay": 0
+ }
+ }
+ server = {"SettingType": "Network",
+ "useDHCPToObtainDNSIPv6": "Disabled",
+ "staticPreferredDNSIPv6": "::",
+ "currentGatewayIPv4": "192.92.24.1",
+ "vlanId": "1",
+ "staticPreferredDNSIPv4": "10.8.8.8",
+ "staticSubnetMaskIPv4": "255.255.254.0",
+ "currentIPAddressIPv4": "192.92.24.177",
+ "enableDHCPIPv4": "Disabled",
+ "currentIPAddressIPv6": "::",
+ "staticIPAddressIPv6": "::",
+ "staticIPAddressIPv4": "192.92.24.177",
+ "useDHCPToObtainDNSIPv4": "Disabled",
+ "staticGatewayIPv6": "::",
+ "staticPrefixLengthIPv6": "64",
+ "vlanEnable": "Disabled",
+ "enableAutoConfigurationIPv6": "Enabled",
+ "staticGatewayIPv4": "192.92.24.1",
+ "enableIPv6": "Disabled",
+ "staticAlternateDNSIPv6": "::",
+ "enableIPv4": "Enabled",
+ "enableNIC": "Enabled",
+ "staticAlternateDNSIPv4": "192.96.7.7"}
+ iom = {"SettingType": "Network",
+ "MgmtVLANId": "",
+ "EnableMgmtVLANId": False,
+ "IomIPv4Settings": {
+ "EnableIPv4": True,
+ "EnableDHCP": True,
+ "StaticIPAddress": "192.96.24.35",
+ "StaticSubnetMask": "255.255.254.0",
+ "StaticGateway": "192.96.24.1"
+ },
+ "IomIPv6Settings": {
+ "EnableIPv6": True,
+ "StaticIPv6Address": "2607:f2b1:f2b1:9:f2b1:f2b1:f2b1:be45",
+ "StaticPrefixLength": "64",
+ "StaticGateway": "fe80::f2b1:f2b1:f2b1:9",
+ "UseDHCPv6": False
+ },
+ "IomDNSSettings": {
+ "PrimaryDNS": "",
+ "SecondaryDNS": "",
+ "TertiaryDNS": ""
+ }}
+
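+    # The same input payload should succeed for chassis (2000), server (1000), and IOM (4000) device types.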
+ @pytest.mark.parametrize("params", [
+ {"module_args": inp_param, "dvc": {"Type": 2000}, "msg": SUCCESS_MSG},
+ {"module_args": inp_param, "dvc": {"Type": 1000}, "msg": SUCCESS_MSG},
+ {"module_args": inp_param, "dvc": {"Type": 4000}, "msg": SUCCESS_MSG}
+ ])
+ def test_ome_device_mgmt_network_success(self, params, ome_connection_mock_for_device_network,
+ ome_response_mock, ome_default_args, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = {"IPAddress": "192.1.2.3"}
+ mocker.patch(MODULE_PATH + 'get_device_details', return_value=params.get("dvc", {"Type": 2000}))
+ mocker.patch(MODULE_PATH + 'get_network_payload', return_value={"Type": 2000})
+ ome_default_args.update(params['module_args'])
+ result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['msg']
+
+ @pytest.mark.parametrize("params", [
+ {"module_args": inp_param, "dvc": {"Type": 3000, "Model": "Unsupported"}, "msg": NON_CONFIG_NETWORK}, ])
+ def test_ome_device_mgmt_network_fails(self, params, ome_connection_mock_for_device_network,
+ ome_response_mock, ome_default_args, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = {"IPAddress": "192.1.2.3"}
+ dvc = params.get("dvc")
+ mocker.patch(MODULE_PATH + 'get_device_details', return_value=dvc)
+ mocker.patch(MODULE_PATH + 'get_network_payload', return_value={})
+ ome_default_args.update(params['module_args'])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == params['msg'].format(dvc.get('Model'))
+
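+    # The leading underscore keeps pytest from collecting this DNS-validation failure case.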
+ @pytest.mark.parametrize("params", [
+ {"module_args": {"device_id": 123, "dns_server_settings": {"alternate_dns_server1": "192.96.20.182"}},
+ "json_data": {"IomDNSSettings": {"PrimaryDNS": None, "SecondaryDNS": "", "TertiaryDNS": ""}},
+ "dvc": {"Type": 4000}, "msg": DNS_SETT_ERR1}])
+ def _test_ome_device_mgmt_iom_dns_failure(self, params, ome_connection_mock_for_device_network,
+ ome_response_mock, ome_default_args, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params.get("json_data")
+ dvc = params.get("dvc")
+ mocker.patch(MODULE_PATH + 'get_device_details', return_value=dvc)
+ ome_default_args.update(params['module_args'])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == params['msg']
+
+ @pytest.mark.parametrize("addr_param", [{"in": inp_param},
+ {"in": {"dns_configuration": {"register_with_dns": True}}},
+ {"in": {"management_vlan": {"enable_vlan": True}}}
+ ])
+ def test_validate_input_success(self, addr_param):
+ f_module = self.get_module_mock(params=addr_param["in"])
+ self.module.validate_input(f_module)
+
+ @pytest.mark.parametrize("param", [{"in": inp_param, "device": chassis, "enable_nic": False, "delay": 5,
+ "diff": {'EnableNIC': False, 'Delay': 5}},
+ {"in": inp_param, "device": chassis, "enable_nic": True,
+ "diff": {'StaticAlternateDNSServer': '2607:f2b1:f081:9:1c8c:f1c7:47e:f121',
+ 'StaticPreferredDNSServer': '2607:f2b1:f081:9:1c8c:f1c7:47e:f122',
+ 'StaticGateway': '0000::ffff', 'StaticSubnetMask': '255.255.255.0',
+ 'StaticIPAddress': '192.168.11.20',
+ 'StaticIPv6Address': '2607:f2b1:f081:9:1c8c:f1c7:47e:f120',
+ 'StaticPrefixLength': 0, 'EnableIPv6': True, 'NetworkSpeed': '10_MB',
+ 'DnsName': 'openmanage-enterprise', 'AutoNegotiation': False,
+ 'DnsDomainName': 'localdomain', 'MgmtVLANId': 0}},
+ {"in": {"ipv6_configuration": ipv6_configuration}, "device": chassis,
+ "enable_nic": True,
+ "diff": {'StaticAlternateDNSServer': '2607:f2b1:f081:9:1c8c:f1c7:47e:f121',
+ 'StaticPreferredDNSServer': '2607:f2b1:f081:9:1c8c:f1c7:47e:f122',
+ 'StaticGateway': '0000::ffff',
+ 'StaticIPv6Address': '2607:f2b1:f081:9:1c8c:f1c7:47e:f120',
+ 'StaticPrefixLength': 0, 'EnableIPv6': True}},
+ {"in": {"ipv4_configuration": ipv4_configuration}, "device": chassis,
+ "enable_nic": True,
+ "diff": {'StaticAlternateDNSServer': '192.168.11.3',
+ 'StaticPreferredDNSServer': '192.168.11.2',
+ 'StaticGateway': '192.168.11.1', 'StaticSubnetMask': '255.255.255.0',
+ 'StaticIPAddress': '192.168.11.20'}},
+ {"in": {"dns_configuration": dns_configuration}, "device": chassis,
+ "enable_nic": True,
+ "diff": {'NetworkSpeed': '10_MB', 'DnsName': 'openmanage-enterprise',
+ 'AutoNegotiation': False, 'DnsDomainName': 'localdomain'}},
+ {"in": {"management_vlan": management_vlan}, "device": chassis,
+ "enable_nic": True,
+ "diff": {'MgmtVLANId': 0}}])
+ def test_update_chassis_payload_success(self, param):
+ inp = param["in"]
+ inp['enable_nic'] = param.get("enable_nic")
+ inp['delay'] = param.get('delay', 0)
+ f_module = self.get_module_mock(params=inp)
+ diff = self.module.update_chassis_payload(f_module, param["device"])
+ assert diff == param.get("diff")
+
+ @pytest.mark.parametrize("param", [{"in": inp_param, "device": server, "enable_nic": False,
+ "diff": {'enableNIC': 'Disabled'}},
+ {"in": inp_param, "device": server, "enable_nic": True,
+ "diff": {'staticIPAddressIPv4': '192.168.11.20',
+ 'staticSubnetMaskIPv4': '255.255.255.0',
+ 'staticGatewayIPv4': '192.168.11.1',
+ 'staticPreferredDNSIPv4': '192.168.11.2',
+ 'staticAlternateDNSIPv4': '192.168.11.3',
+ 'enableAutoConfigurationIPv6': 'Disabled',
+ 'vlanEnable': 'Enabled',
+ 'staticPreferredDNSIPv6': '2607:f2b1:f081:9:1c8c:f1c7:47e:f122',
+ 'staticAlternateDNSIPv6': '2607:f2b1:f081:9:1c8c:f1c7:47e:f121',
+ 'staticIPAddressIPv6': '2607:f2b1:f081:9:1c8c:f1c7:47e:f120',
+ 'staticPrefixLengthIPv6': 0, 'staticGatewayIPv6': '0000::ffff',
+ 'enableIPv6': 'Enabled',
+ 'vlanId': 0}},
+ {"in": {"ipv6_configuration": ipv6_configuration}, "device": server,
+ "enable_nic": True,
+ "diff": {'staticPreferredDNSIPv6': '2607:f2b1:f081:9:1c8c:f1c7:47e:f122',
+ 'staticAlternateDNSIPv6': '2607:f2b1:f081:9:1c8c:f1c7:47e:f121',
+ 'staticIPAddressIPv6': '2607:f2b1:f081:9:1c8c:f1c7:47e:f120',
+ 'staticPrefixLengthIPv6': 0, 'staticGatewayIPv6': '0000::ffff',
+ 'enableAutoConfigurationIPv6': 'Disabled', 'enableIPv6': 'Enabled'}},
+ {"in": {"ipv4_configuration": ipv4_configuration}, "device": server,
+ "enable_nic": True, "diff": {'staticIPAddressIPv4': '192.168.11.20',
+ 'staticSubnetMaskIPv4': '255.255.255.0',
+ 'staticGatewayIPv4': '192.168.11.1',
+ 'staticPreferredDNSIPv4': '192.168.11.2',
+ 'staticAlternateDNSIPv4': '192.168.11.3'}},
+ {"in": {"management_vlan": management_vlan}, "device": server,
+ "enable_nic": True, "diff": {'vlanEnable': 'Enabled', 'vlanId': 0}}
+ ])
+ def test_update_server_payload_success(self, param):
+ inp = param["in"]
+ inp['enable_nic'] = param.get("enable_nic")
+ f_module = self.get_module_mock(params=inp)
+ diff = self.module.update_server_payload(f_module, param["device"])
+ assert diff == param.get("diff")
+
+ @pytest.mark.parametrize("param", [{"in": inp_param, "device": iom, "enable_nic": False,
+ "diff": {'StaticGateway': '0000::ffff', 'StaticIPAddress': '192.168.11.20',
+ 'StaticSubnetMask': '255.255.255.0', 'EnableDHCP': False,
+ 'EnableMgmtVLANId': True,
+ 'StaticPrefixLength': 0,
+ 'StaticIPv6Address': '2607:f2b1:f081:9:1c8c:f1c7:47e:f120',
+ 'MgmtVLANId': 0, 'SecondaryDNS': '192.96.20.182',
+ 'PrimaryDNS': '192.96.20.181'}},
+ {"in": inp_param, "device": iom, "enable_nic": True,
+ "diff": {'StaticGateway': '0000::ffff', 'StaticIPAddress': '192.168.11.20',
+ 'StaticSubnetMask': '255.255.255.0', 'EnableDHCP': False,
+ 'StaticPrefixLength': 0, 'EnableMgmtVLANId': True,
+ 'StaticIPv6Address': '2607:f2b1:f081:9:1c8c:f1c7:47e:f120',
+ 'MgmtVLANId': 0, 'SecondaryDNS': '192.96.20.182',
+ 'PrimaryDNS': '192.96.20.181'}},
+ {"in": {"ipv6_configuration": ipv6_configuration}, "device": iom,
+ "enable_nic": True, "diff": {'StaticGateway': '0000::ffff',
+ 'StaticPrefixLength': 0,
+ 'StaticIPv6Address': '2607:f2b1:f081:9:1c8c:f1c7:47e:f120'}},
+ {"in": {"ipv4_configuration": ipv4_configuration}, "device": iom,
+ "enable_nic": True,
+ "diff": {'StaticGateway': '192.168.11.1', 'StaticIPAddress': '192.168.11.20',
+ 'StaticSubnetMask': '255.255.255.0', 'EnableDHCP': False}},
+ {"in": {"management_vlan": management_vlan}, "device": iom,
+ "enable_nic": True, "diff": {'EnableMgmtVLANId': True, 'MgmtVLANId': 0}}
+ ])
+ def test_update_iom_payload_success(self, param):
+ inp = param["in"]
+ inp['enable_nic'] = param.get("enable_nic")
+ f_module = self.get_module_mock(params=inp)
+ diff = self.module.update_iom_payload(f_module, param["device"])
+ assert diff == param.get("diff")
+
+ @pytest.mark.parametrize("params", [{"mparams": {
+ 'dns_configuration': {'dns_domain_name': 'localdomain', 'dns_name': 'openmanage-enterprise',
+ 'register_with_dns': True, 'auto_negotiation': True,
+ 'network_speed': '10_MB', 'use_dhcp_for_dns_domain_name': True},
+ 'ipv4_configuration': {'enable_ipv4': False, 'enable_dhcp': True, 'use_dhcp_to_obtain_dns_server_address': True,
+ 'static_ip_address': '192.168.11.20', 'static_subnet_mask': '255.255.255.0',
+ 'static_gateway': '192.168.11.1', 'static_preferred_dns_server': '192.168.11.2',
+ 'static_alternate_dns_server': '192.168.11.3'},
+ 'ipv6_configuration': {'enable_ipv6': False, 'enable_auto_configuration': True,
+ 'static_alternate_dns_server': '2607:f2b1:f081:9:1c8c:f1c7:47e:f121',
+ 'static_gateway': '0000::ffff', 'static_ip_address': '2607:f2b1:f081:9:1c8c:f1c7:47e:f120',
+ 'static_preferred_dns_server': '2607:f2b1:f081:9:1c8c:f1c7:47e:f122',
+ 'static_prefix_length': 0, 'use_dhcpv6_to_obtain_dns_server_address': True},
+ 'management_vlan': {'enable_vlan': False, 'vlan_id': 0},
+ 'dns_server_settings': {'preferred_dns_server': '192.96.20.181',
+ 'alternate_dns_server1': '192.96.20.182'}},
+ "res": {'dns_configuration': {'dns_name': 'openmanage-enterprise',
+ 'register_with_dns': True, 'auto_negotiation': True,
+ 'use_dhcp_for_dns_domain_name': True},
+ 'ipv4_configuration': {'enable_ipv4': False},
+ 'ipv6_configuration': {'enable_ipv6': False},
+ 'management_vlan': {'enable_vlan': False},
+ 'dns_server_settings': {'preferred_dns_server': '192.96.20.181',
+ 'alternate_dns_server1': '192.96.20.182'}}}])
+ def test_validate_dependency(self, params):
+ mparams = params["mparams"]
+ result = self.module.validate_dependency(mparams)
+ assert result == params["res"]
+
+ @pytest.mark.parametrize("params", [{"mparams": {"device_id": 123}, "success": True, "json_data": {
+ "value": [{"Name": "vlan_name1", "Id": 124, "Identifier": "ABCD345"},
+ {"Name": "vlan_name", "Id": 123, "Identifier": "ABCD123"}]}, "res":
+ {"Name": "vlan_name", "Id": 123, "Identifier": "ABCD123"}}, {
+ "mparams": {"device_service_tag": "ABCD123"}, "success": True,
+ "json_data": {"value": [{"Name": "vlan_name", "Id": 123, "Identifier": "ABCD123"}]},
+ "res": {"Name": "vlan_name", "Id": 123, "Identifier": "ABCD123"}}])
+ def test_get_device_details(
+ self, params, ome_connection_mock_for_device_network, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params["mparams"])
+ result = self.module.get_device_details(
+ f_module, ome_connection_mock_for_device_network)
+ assert result == params["res"]
+
+ @pytest.mark.parametrize("params", [
+ {"mparams": {"device_id": 123}, "success": True,
+ "json_data": {"Type": 2000, "Id": 123, "Identifier": "ABCD123"},
+ "res": {"Type": 2000, "Id": 123, "Identifier": "ABCD123"},
+ "diff": {"IPV4": "1.2.3.4"}},
+ {"mparams": {"device_id": 123}, "success": True,
+ "json_data": {"Type": 4000, "Id": 123, "Identifier": "ABCD123"},
+ "res": {"Type": 4000, "Id": 123, "Identifier": "ABCD123"},
+ "diff": {"IPV4": "1.2.3.4"}},
+ ])
+ def test_get_network_payload(
+ self, params, ome_connection_mock_for_device_network, ome_response_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ ome_connection_mock_for_device_network.strip_substr_dict.return_value = params.get("json_data")
+ mocker.patch(MODULE_PATH + 'update_chassis_payload', return_value=params['diff'])
+ f_module = self.get_module_mock(params=params["mparams"])
+ result = self.module.get_network_payload(
+ f_module, ome_connection_mock_for_device_network, {"Id": 123, "Type": 2000})
+ assert result == params.get("res")
+
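+    # Exception-handling contract: URLError is reported as unreachable, HTTPError/SSLValidationError
+    # are raised with a JSON error body, and any other exception surfaces as a generic module failure.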
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLValidationError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_device_network_main_exception_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_device_network, ome_response_mock):
+ ome_default_args.update({"device_service_tag": Constants.service_tag1})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'validate_input', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'validate_input', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'validate_input',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_network_services.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_network_services.py
new file mode 100644
index 00000000..0a68ac9d
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_network_services.py
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json

+
+import pytest
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_network_services
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_network_services.'
+
+
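+# Patches RestOME so every invoke_request call returns the shared ome_response_mock;
+# individual tests then drive behaviour through ome_response_mock.json_data/status_code.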
+@pytest.fixture
+def ome_conn_mock_network(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOMEMDeviceNetworkService(FakeAnsibleModule):
+
+ module = ome_device_network_services
+
+ def test_check_domain_service(self, ome_conn_mock_network, ome_default_args):
+ f_module = self.get_module_mock()
+ result = self.module.check_domain_service(f_module, ome_conn_mock_network)
+ assert result is None
+
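+    # Simulates an HTTPError whose extended info carries MessageId CGEN1006, so check_domain_service
+    # is expected to report that the operation is supported only on OpenManage Enterprise Modular.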
+ def test_check_domain_service_http(self, ome_conn_mock_network, ome_default_args, mocker):
+ f_module = self.get_module_mock()
+ err_message = {'error': {'@Message.ExtendedInfo': [{'MessageId': 'CGEN1006'}]}}
+ ome_conn_mock_network.invoke_request.side_effect = HTTPError('http://testhost.com', 400,
+ json.dumps(err_message),
+ {"accept-type": "application/json"}, None)
+ mocker.patch(MODULE_PATH + 'json.loads', return_value=err_message)
+ with pytest.raises(Exception) as err:
+ self.module.check_domain_service(f_module, ome_conn_mock_network)
+ assert err.value.args[0] == "The device location settings operation is supported only on " \
+ "OpenManage Enterprise Modular."
+
+ def test_get_chassis_device(self, ome_conn_mock_network, ome_default_args, mocker, ome_response_mock):
+ mocker.patch(MODULE_PATH + "get_ip_from_host", return_value="192.18.1.1")
+ ome_response_mock.json_data = {"value": [{"DeviceId": 25011, "DomainRoleTypeValue": "LEAD",
+ "PublicAddress": ["192.168.1.1"]},
+ {"DeviceId": 25012, "DomainRoleTypeValue": "STANDALONE",
+ "PublicAddress": ["192.168.1.2"]}]}
+ param = {"device_id": 25012, "hostname": "192.168.1.6", "remote_racadm_settings": {"enabled": True}}
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as err:
+ self.module.get_chassis_device(f_module, ome_conn_mock_network)
+ assert err.value.args[0] == "Failed to retrieve the device information."
+ ome_response_mock.json_data = {"value": [{"DeviceId": 25011, "DomainRoleTypeValue": "LEAD",
+ "PublicAddress": ["192.18.1.1"]}]}
+ param = {"hostname": "192.18.1.1", "remote_racadm_settings": {"enabled": True}}
+ f_module = self.get_module_mock(params=param)
+ key, value = self.module.get_chassis_device(f_module, ome_conn_mock_network)
+ assert key == "Id"
+ assert value == 25011
+
+ def test_main_validation(self, ome_conn_mock_network, ome_default_args, ome_response_mock, mocker):
+ resp = self._run_module_with_fail_json(ome_default_args)
+ assert resp['msg'] == "one of the following is required: snmp_settings, " \
+ "ssh_settings, remote_racadm_settings"
+ mocker.patch(MODULE_PATH + "check_domain_service", return_value=None)
+ mocker.patch(MODULE_PATH + "fetch_device_details", return_value=ome_response_mock)
+ ome_response_mock.json_data = {"value": [{"Id": 25011, "DeviceServiceTag": "XE3FRS"}],
+ "EnableRemoteRacadm": True, "SettingType": "NetworkServices",
+ "SnmpConfiguration": {"PortNumber": 161, "SnmpEnabled": True,
+ "SnmpV1V2Credential": {"CommunityName": "public"}},
+ "SshConfiguration": {"IdleTimeout": 60, "MaxAuthRetries": 3, "MaxSessions": 1,
+ "PortNumber": 22, "SshEnabled": False}}
+ ome_default_args.update({"device_id": 25012, "hostname": "192.168.1.6", "remote_racadm_settings": {"enabled": True},
+ "snmp_settings": {"enabled": True, "port_number": 161, "community_name": "public"},
+ "ssh_settings": {"enabled": True, "port_number": 22, "max_sessions": 1,
+ "max_auth_retries": 3, "idle_timeout": 60}})
+ resp = self._run_module(ome_default_args)
+ assert resp['msg'] == "Successfully updated the network services settings."
+
+ def test_fetch_device_details(self, ome_conn_mock_network, ome_default_args, ome_response_mock, mocker):
+ param = {"device_id": 25012, "hostname": "192.168.1.6", "remote_racadm_settings": {"enabled": True}}
+ f_module = self.get_module_mock(params=param)
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [{"Id": 25011, "DeviceServiceTag": "XE3FRS"}],
+ "EnableRemoteRacadm": True, "SettingType": "NetworkServices",
+ "SnmpConfiguration": {"PortNumber": 161, "SnmpEnabled": True,
+ "SnmpV1V2Credential": {"CommunityName": "public"}},
+ "SshConfiguration": {"IdleTimeout": 60, "MaxAuthRetries": 3, "MaxSessions": 1,
+ "PortNumber": 22, "SshEnabled": False}}
+ with pytest.raises(Exception) as err:
+ self.module.fetch_device_details(f_module, ome_conn_mock_network)
+ assert err.value.args[0] == "Unable to complete the operation because the entered target " \
+ "device id '25012' is invalid."
+ ome_response_mock.strip_substr_dict.return_value = {"EnableRemoteRacadm": True}
+ ome_response_mock.json_data = {"value": [{"Id": 25012, "DeviceServiceTag": "XE3FRS"}],
+ "EnableRemoteRacadm": True, "SnmpConfiguration": {}, "SshConfiguration": {}}
+ resp = self.module.fetch_device_details(f_module, ome_conn_mock_network)
+ assert resp.json_data["SnmpConfiguration"] == {}
+ param = {"hostname": "192.168.1.6", "remote_racadm_settings": {"enabled": True}}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "get_chassis_device", return_value=("Id", "25012"))
+ resp = self.module.fetch_device_details(f_module, ome_conn_mock_network)
+ assert resp.json_data["SnmpConfiguration"] == {}
+
+ def test_get_ip_from_host(self, ome_conn_mock_network, ome_default_args, ome_response_mock):
+ result = self.module.get_ip_from_host("192.168.0.1")
+ assert result == "192.168.0.1"
+
+ def test_check_mode_validation(self, ome_conn_mock_network, ome_default_args, ome_response_mock):
+ param = {"device_id": 25012, "hostname": "192.168.1.6", "remote_racadm_settings": {"enabled": True},
+ "snmp_settings": {"enabled": True, "port_number": 161, "community_name": "public"},
+ "ssh_settings": {"enabled": True, "port_number": 22, "max_sessions": 1,
+ "max_auth_retries": 3, "idle_timeout": 120}}
+ f_module = self.get_module_mock(params=param)
+ loc_data = {"EnableRemoteRacadm": True, "SettingType": "NetworkServices",
+ "SnmpConfiguration": {"PortNumber": 161, "SnmpEnabled": True,
+ "SnmpV1V2Credential": {"CommunityName": "public"}},
+ "SshConfiguration": {"IdleTimeout": 7200, "MaxAuthRetries": 3, "MaxSessions": 1,
+ "PortNumber": 22, "SshEnabled": True}}
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, loc_data, ome_conn_mock_network)
+ assert err.value.args[0] == "No changes found to be applied."
+ f_module.check_mode = True
+ loc_data["SshConfiguration"]["IdleTimeout"] = 7200
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, loc_data, ome_conn_mock_network)
+ assert err.value.args[0] == "No changes found to be applied."
+ loc_data = {"EnableRemoteRacadm": True, "SettingType": "NetworkServices",
+ "SnmpConfiguration": {"PortNumber": 161, "SnmpEnabled": False,
+ "SnmpV1V2Credential": {"CommunityName": "public"}},
+ "SshConfiguration": {"IdleTimeout": 60, "MaxAuthRetries": 3, "MaxSessions": 1,
+ "PortNumber": 22, "SshEnabled": False}}
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, loc_data, ome_conn_mock_network)
+ assert err.value.args[0] == "Changes found to be applied."
+ param = {"device_id": 25012, "hostname": "192.168.1.6", "remote_racadm_settings": {"enabled": False},
+ "snmp_settings": {"enabled": False, "port_number": 161, "community_name": "public"},
+ "ssh_settings": {"enabled": False, "port_number": 22, "max_sessions": 1,
+ "max_auth_retries": 3, "idle_timeout": 60}}
+ f_module = self.get_module_mock(params=param)
+ resp = self.module.check_mode_validation(f_module, loc_data, ome_conn_mock_network)
+ assert resp["SnmpConfiguration"]["PortNumber"] == 161
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_device_network_main_exception_case(self, exc_type, mocker, ome_default_args,
+ ome_conn_mock_network, ome_response_mock):
+ ome_default_args.update({"device_id": 25011, "remote_racadm_settings": {"enabled": True}})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py
new file mode 100644
index 00000000..928c407c
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py
@@ -0,0 +1,122 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import pytest
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_power_settings
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock, patch, Mock
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_power_settings.'
+
+
+@pytest.fixture
+def ome_conn_mock_power(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOMEMDevicePower(FakeAnsibleModule):
+
+ module = ome_device_power_settings
+
+ def test_check_domain_service(self, ome_conn_mock_power, ome_default_args):
+ f_module = self.get_module_mock()
+ result = self.module.check_domain_service(f_module, ome_conn_mock_power)
+ assert result is None
+
+ def test_get_chassis_device(self, ome_conn_mock_power, ome_default_args, mocker, ome_response_mock):
+ mocker.patch(MODULE_PATH + "get_ip_from_host", return_value="192.18.1.1")
+ ome_response_mock.json_data = {"value": [{"DeviceId": 25011, "DomainRoleTypeValue": "LEAD",
+ "PublicAddress": ["192.168.1.1"]},
+ {"DeviceId": 25012, "DomainRoleTypeValue": "STANDALONE",
+ "PublicAddress": ["192.168.1.2"]}]}
+ param = {"device_id": 25012, "hostname": "192.168.1.6",
+ "power_configuration": {"enable_power_cap": True, "power_cap": 3424}}
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as err:
+ self.module.get_chassis_device(f_module, ome_conn_mock_power)
+ assert err.value.args[0] == "Failed to fetch the device information."
+
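+    # check-mode behaviour: settings identical to loc_data raise "No changes found to be applied.",
+    # while a differing setting in check mode raises "Changes found to be applied."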
+ def test_check_mode_validation(self, ome_conn_mock_power, ome_default_args, ome_response_mock):
+ loc_data = {"PowerCap": "3424", "MinPowerCap": "3291", "MaxPowerCap": "3424",
+ "RedundancyPolicy": "NO_REDUNDANCY", "EnablePowerCapSettings": True,
+ "EnableHotSpare": True, "PrimaryGrid": "GRID_1", "PowerBudgetOverride": False}
+ param = {"power_configuration": {"enable_power_cap": True, "power_cap": 3424}}
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, loc_data)
+ param = {"hot_spare_configuration": {"enable_hot_spare": False}}
+ f_module = self.get_module_mock(params=param)
+ f_module.check_mode = True
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, loc_data)
+ assert err.value.args[0] == "Changes found to be applied."
+ param = {"redundancy_configuration": {"redundancy_policy": "NO_REDUNDANCY"}}
+ f_module = self.get_module_mock(params=param)
+ f_module.check_mode = True
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, loc_data)
+ assert err.value.args[0] == "No changes found to be applied."
+
+ def test_fetch_device_details(self, ome_conn_mock_power, ome_default_args, ome_response_mock):
+ param = {"device_id": 25012, "hostname": "192.168.1.6",
+ "power_configuration": {"enable_power_cap": True, "power_cap": 3424}}
+ f_module = self.get_module_mock(params=param)
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [], "PowerCap": "3424", "MinPowerCap": "3291",
+ "MaxPowerCap": "3424", "RedundancyPolicy": "NO_REDUNDANCY",
+ "EnablePowerCapSettings": True, "EnableHotSpare": True,
+ "PrimaryGrid": "GRID_1", "PowerBudgetOverride": False}
+ with pytest.raises(Exception) as err:
+ self.module.fetch_device_details(f_module, ome_conn_mock_power)
+ assert err.value.args[0] == "Unable to complete the operation because the entered target " \
+ "device id '25012' is invalid."
+
+ def test_get_ip_from_host(self, ome_conn_mock_power, ome_default_args, ome_response_mock):
+ result = self.module.get_ip_from_host("192.168.0.1")
+ assert result == "192.168.0.1"
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_device_power_main_exception_case(self, exc_type, mocker, ome_default_args,
+ ome_conn_mock_power, ome_response_mock):
+ ome_default_args.update({"device_id": 25011, "power_configuration": {"enable_power_cap": True,
+ "power_cap": 3424}})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py
new file mode 100644
index 00000000..97b611ce
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py
@@ -0,0 +1,173 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.0.0
+# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import pytest
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_quick_deploy
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_quick_deploy.'
+
+
+@pytest.fixture
+def ome_conn_mock_qd(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOMEMDeviceQuickDeploy(FakeAnsibleModule):
+
+ module = ome_device_quick_deploy
+
+ def test_check_domain_service(self, ome_conn_mock_qd, ome_default_args):
+ f_module = self.get_module_mock()
+ result = self.module.check_domain_service(f_module, ome_conn_mock_qd)
+ assert result is None
+
+ def test_get_chassis_device(self, ome_conn_mock_qd, ome_default_args, mocker, ome_response_mock):
+ mocker.patch(MODULE_PATH + "get_ip_from_host", return_value="192.18.1.1")
+ ome_response_mock.json_data = {"value": [{"DeviceId": 25011, "DomainRoleTypeValue": "LEAD",
+ "PublicAddress": ["192.168.1.1"]},
+ {"DeviceId": 25012, "DomainRoleTypeValue": "STANDALONE",
+ "PublicAddress": ["192.168.1.2"]}]}
+ param = {"device_id": 25012, "hostname": "192.168.1.6"}
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as err:
+ self.module.get_chassis_device(f_module, ome_conn_mock_qd)
+ assert err.value.args[0] == "Unable to retrieve the device information."
+
+ def test_get_ip_from_host(self, ome_conn_mock_qd, ome_default_args, ome_response_mock):
+ result = self.module.get_ip_from_host("192.168.0.1")
+ assert result == "192.168.0.1"
+
+ def test_validate_ip_address(self, ome_conn_mock_qd, ome_response_mock, ome_default_args):
+ result = self.module.validate_ip_address("192.168.0.1", "IPV4")
+ assert result is True
+ result = self.module.validate_ip_address("192.168.0.1.1", "IPV4")
+ assert result is False
+ result = self.module.validate_ip_address("::", "IPV6")
+ assert result is True
+
+ def test_ip_address_field(self, ome_conn_mock_qd, ome_response_mock, ome_default_args, mocker):
+ param = {"device_id": 25011, "setting_type": "ServerQuickDeploy",
+ "quick_deploy_options": {"ipv4_enabled": False, "ipv4_subnet_mask": "192.168.0.1",
+ "ipv4_gateway": "0.0.0.0.0"}, "slots": [{"vlan_id": 1}]}
+ fields = [("ipv4_subnet_mask", "IPV4"), ("ipv4_gateway", "IPV4"), ("ipv6_gateway", "IPV6")]
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "validate_ip_address", return_value=False)
+ with pytest.raises(Exception) as err:
+ self.module.ip_address_field(f_module, fields, param["quick_deploy_options"], slot=False)
+ assert err.value.args[0] == "Invalid '192.168.0.1' address provided for the ipv4_subnet_mask."
+
+ def test_get_device_details(self, ome_conn_mock_qd, ome_response_mock, ome_default_args, mocker):
+ param = {"device_id": 25012, "hostname": "192.168.1.6", "setting_type": "ServerQuickDeploy",
+ "quick_deploy_options": {"ipv4_enabled": False, "ipv4_subnet_mask": "192.168.0.1",
+ "ipv4_gateway": "0.0.0.0"}, "slots": [{"vlan_id": 1}]}
+ f_module = self.get_module_mock(params=param)
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [], "SettingType": "ServerQuickDeploy",
+ "ProtocolTypeV4": "true", "NetworkTypeV4": "Static",
+ "IpV4Gateway": "192.168.0.1", "IpV4SubnetMask": "255.255.255.0"}
+ mocker.patch(MODULE_PATH + 'get_chassis_device', return_value=("Id", 25011))
+ mocker.patch(MODULE_PATH + "check_mode_validation", return_value=({}, {}))
+ mocker.patch(MODULE_PATH + "job_payload_submission", return_value=12345)
+ with pytest.raises(Exception) as err:
+ self.module.get_device_details(ome_conn_mock_qd, f_module)
+ assert err.value.args[0] == "Unable to complete the operation because the entered " \
+ "target device id '25012' is invalid."
+ param.update({"job_wait": False})
+ ome_response_mock.json_data.update({"value": [{"Id": 25012}]})
+ f_module = self.get_module_mock(params=param)
+ result = self.module.get_device_details(ome_conn_mock_qd, f_module)
+ assert result == (12345, None)
+ param.update({"job_wait": True})
+
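+    # job_submission on the connection mock returns a response whose Id is 12345, so
+    # job_payload_submission is expected to return that job id for the Quick Deploy payload.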
+ def test_job_payload_submission(self, ome_conn_mock_qd, ome_response_mock, ome_default_args):
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"Id": 12345}
+ ome_conn_mock_qd.job_submission.return_value = ome_response_mock
+ payload = {"ProtocolTypeV4": True, "NetworkTypeV4": "Static", "IpV4SubnetMask": "255.255.255.0",
+ "IpV4Gateway": "0.0.0.0", "ProtocolTypeV6": True, "NetworkTypeV6": "Static",
+ "PrefixLength": "1", "IpV6Gateway": "0.0.0.0"}
+ slot_payload = [{"SlotId": 1, "IPV4Address": "192.168.0.2", "IPV6Address": "::", "VlanId": 1}]
+ resp_data = {"Slots": [
+ {"SlotId": 1, "IPV4Address": "192.168.0.2", "IPV6Address": "::", "VlanId": 1, "SlotSelected": False},
+ {"SlotId": 1, "IPV4Address": "192.168.0.2", "IPV6Address": "::", "VlanId": 1, "SlotSelected": False},
+ ]}
+ result = self.module.job_payload_submission(ome_conn_mock_qd, payload, slot_payload,
+ "ServerQuickDeploy", 25012, resp_data)
+ assert result == 12345
+
+ def test_check_mode_validation(self, ome_conn_mock_qd, ome_response_mock, ome_default_args):
+ param = {"device_id": 25012, "hostname": "192.168.1.6", "setting_type": "ServerQuickDeploy",
+ "quick_deploy_options": {
+ "ipv4_enabled": True, "ipv4_network_type": "Static", "ipv4_subnet_mask": "255.255.255.0",
+ "ipv4_gateway": "0.0.0.0", "ipv6_enabled": True, "ipv6_network_type": "Static",
+ "ipv6_prefix_length": "1", "ipv6_gateway": "0.0.0.0",
+ "slots": [{"slot_id": 1, "slot_ipv4_address": "192.168.0.1",
+ "slot_ipv6_address": "::", "vlan_id": "1"}]}}
+ f_module = self.get_module_mock(params=param)
+ deploy_data = {"ProtocolTypeV4": True, "NetworkTypeV4": "Static", "IpV4SubnetMask": "255.255.255.0",
+ "IpV4Gateway": "0.0.0.0", "ProtocolTypeV6": True, "NetworkTypeV6": "Static",
+ "PrefixLength": "1", "IpV6Gateway": "0.0.0.0",
+ "Slots": [{"SlotId": 1, "SlotIPV4Address": "192.168.0.1", "SlotIPV6Address": "::", "VlanId": "1"}]}
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, deploy_data)
+ assert err.value.args[0] == "No changes found to be applied."
+ f_module.check_mode = True
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, deploy_data)
+ assert err.value.args[0] == "No changes found to be applied."
+ param["quick_deploy_options"]["ipv6_prefix_length"] = "2"
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, deploy_data)
+ assert err.value.args[0] == "Changes found to be applied."
+ f_module.check_mode = False
+ result = self.module.check_mode_validation(f_module, deploy_data)
+ assert result[0]["NetworkTypeV4"] == "Static"
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+    def test_ome_device_quick_deploy_main_exception_case(self, exc_type, mocker, ome_default_args,
+                                                          ome_conn_mock_qd, ome_response_mock):
+ ome_default_args.update({"device_id": 25011, "setting_type": "ServerQuickDeploy", "validate_certs": False,
+ "quick_deploy_options": {"ipv4_enabled": False,
+ "slots": [{"slot_id": 1, "vlan_id": 1}]}})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py
new file mode 100644
index 00000000..94e76df1
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py
@@ -0,0 +1,467 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 6.1.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+from ssl import SSLError
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import CHANGES_MSG, NO_CHANGES_MSG
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_devices
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
+DELETE_SUCCESS = "The devices(s) are removed successfully."
+INVALID_DEV_ST = "Unable to complete the operation because the entered target device(s) '{0}' are invalid."
+JOB_DESC = "The {0} task initiated from OpenManage Ansible Modules for devices with the ids '{1}'."
+APPLY_TRIGGERED = "Successfully initiated the device action job."
+JOB_SCHEDULED = "The job is scheduled successfully."
+SUCCESS_MSG = "The device operation is performed successfully."
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_devices.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_devices(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.ome_devices.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeDevices(FakeAnsibleModule):
+ module = ome_devices
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [{'Id': 24, 'Identifier': "ABCTAG1", "Type": 1000}]},
+ 'message': DELETE_SUCCESS, "success": True, 'mparams': {
+ "device_service_tags": ["ABCTAG1", "BCDTAG2"], 'state': 'absent'}},
+ {"json_data": {"value": [{'Id': 24, 'Identifier': "ABCTAG1", "Type": 1000}]},
+ 'message': CHANGES_MSG, "success": True,
+ 'check_mode': True,
+ 'mparams': {"device_service_tags": ["ABCTAG1", "BCDTAG2"], 'state': 'absent'}},
+ {"json_data": {"value": [{'Id': 24, 'Identifier': "ABCTAG1", "Type": 1000}]},
+ 'message': NO_CHANGES_MSG, "success": True,
+ 'mparams': {"device_service_tags": ["ABCTAG2", "BCDTAG2"], 'state': 'absent'}},
+ {"json_data": {"value": [{'Id': 24, 'Identifier': "ABCTAG2", "Type": 1001}]},
+ 'message': INVALID_DEV_ST.format(",".join(map(str, ["ABCTAG2"]))), "success": True,
+ 'mparams': {"device_service_tags": ["ABCTAG2"], 'state': 'present'}},
+ {"json_data": {"value": [{'Id': 24, 'Identifier': "ABCTAG2", "Type": 1001}]},
+ 'message': INVALID_DEV_ST.format(",".join(map(str, [24, 25]))), "success": True,
+ 'mparams': {"device_ids": [24, 25], 'state': 'present'}},
+ {"json_data": {"value": []},
+ 'message': INVALID_DEV_ST.format(",".join(map(str, [24]))), "success": True,
+ 'mparams': {"device_ids": [24], 'state': 'present'}}
+ ])
+ def test_ome_devices_delete(self, params, ome_connection_mock_for_devices, ome_response_mock, ome_default_args,
+ module_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_connection_mock_for_devices.get_all_items_with_pagination.return_value = params['json_data']
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['message']
+
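+    # Each case pairs a set of existing jobs with module arguments to exercise the idempotency and
+    # job-tracking paths of state 'present': NO_CHANGES_MSG, CHANGES_MSG in check mode,
+    # APPLY_TRIGGERED, JOB_SCHEDULED, SUCCESS_MSG, and a job-tracking failure message.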
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [{'Id': 24, 'Identifier': "ABCTAG1", "Type": 1000},
+ {'Id': 25, 'Identifier': "BCDTAG2", "Type": 1000}]},
+ 'message': APPLY_TRIGGERED, "success": True, 'mparams': {
+ "device_service_tags": ["ABCTAG1", "BCDTAG2"], "job_wait": False,
+ "job_name": "my test job", "job_description": "My job description"
+ }, "check_similar_job": {}},
+ {"json_data": {"value": [{'Id': 24, 'Identifier': "ABCTAG1", "Type": 1000},
+ {'Id': 25, 'Identifier': "BCDTAG2", "Type": 1000}]},
+ 'message': APPLY_TRIGGERED, "success": True, 'mparams': {
+ "device_service_tags": ["ABCTAG1", "BCDTAG2"], "job_wait": False
+ }, "check_similar_job": {}},
+ {"json_data": {"value": [{'Id': 24, 'Identifier': "ABCTAG1", "Type": 1000},
+ {'Id': 25, 'Identifier': "BCDTAG2", "Type": 1000}]},
+ 'message': JOB_SCHEDULED, "success": True, 'mparams': {
+ "device_service_tags": ["ABCTAG1", "BCDTAG2"], "job_wait": False,
+ "job_schedule": "my cron task"
+ }, "check_similar_job": {}},
+ {"json_data": {"value": [{'Id': 24, 'Identifier': "ABCTAG1", "Type": 1000},
+ {'Id': 25, 'Identifier': "BCDTAG2", "Type": 1000}]},
+ 'message': CHANGES_MSG, "success": True, 'mparams': {
+ "device_service_tags": ["ABCTAG1", "BCDTAG2"], "job_wait": False},
+ "check_similar_job": {}, "check_mode": True
+ },
+ {"json_data": {
+ "value": [
+ {
+ "Id": 14874,
+ "JobName": "Refresh inventory",
+ "JobDescription": JOB_DESC.format("Refresh inventory", "13216"),
+ "Schedule": "startnow",
+ "State": "Enabled",
+ "Targets": [
+ {
+ "JobId": 14874,
+ "Id": 13123,
+ "Data": "",
+ "TargetType": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ],
+ "Params": [
+ {
+ "JobId": 14874,
+ "Key": "action",
+ "Value": "CONFIG_INVENTORY"
+ },
+ {
+ "JobId": 14874,
+ "Key": "isCollectDriverInventory",
+ "Value": "true"
+ }
+ ],
+ "LastRunStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2060,
+ "Name": "Completed"
+ },
+ "JobType": {
+ "@odata.type": "#JobService.JobType",
+ "Id": 8,
+ "Name": "Inventory_Task",
+ },
+ },
+ {
+ "Id": 14874,
+ "JobName": "Refresh inventory",
+ "JobDescription": JOB_DESC.format("Refresh inventory", "13216"),
+ "Schedule": "startnow",
+ "State": "Enabled",
+ "Targets": [
+ {
+ "JobId": 14874,
+ "Id": 13216,
+ "Data": "",
+ "TargetType": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ],
+ "Params": [
+ {
+ "JobId": 14874,
+ "Key": "action",
+ "Value": "CONFIG_INVENTORY"
+ },
+ {
+ "JobId": 14874,
+ "Key": "isCollectDriverInventory",
+ "Value": "false"
+ }
+ ],
+ "LastRunStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2060,
+ "Name": "Completed"
+ },
+ "JobType": {
+ "@odata.type": "#JobService.JobType",
+ "Id": 8,
+ "Name": "Inventory_Task",
+ },
+ },
+ {
+ "Id": 14874,
+ "JobName": "Refresh inventory",
+ "JobDescription": JOB_DESC.format("Refresh inventory", "13216"),
+ "Schedule": "startnow",
+ "State": "Enabled",
+ "Targets": [
+ {
+ "JobId": 14874,
+ "Id": 13216,
+ "Data": "",
+ "TargetType": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ],
+ "Params": [
+ {
+ "JobId": 14874,
+ "Key": "action",
+ "Value": "CONFIG_INVENTORY"
+ },
+ {
+ "JobId": 14874,
+ "Key": "isCollectDriverInventory",
+ "Value": "true"
+ }
+ ],
+ "LastRunStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2060,
+ "Name": "Completed"
+ },
+ "JobType": {
+ "@odata.type": "#JobService.JobType",
+ "Id": 8,
+ "Name": "Inventory_Task",
+ },
+ }
+ ]
+ },
+ 'message': APPLY_TRIGGERED, "success": True, 'mparams': {
+ "device_service_tags": ["ABCTAG1", "BCDTAG2"], "job_wait": False
+ }, "get_dev_ids": ([13216], {})},
+
+ {"json_data": {
+ "value": [
+ {
+ "Id": 14874,
+ "JobName": "Refresh inventory",
+ "JobDescription": JOB_DESC.format("Refresh inventory", "13216"),
+ "Schedule": "startnow",
+ "State": "Enabled",
+ "Targets": [
+ {
+ "JobId": 14874,
+ "Id": 13216,
+ "Data": "",
+ "TargetType": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ],
+ "Params": [
+ {
+ "JobId": 14874,
+ "Key": "action",
+ "Value": "CONFIG_INVENTORY"
+ },
+ {
+ "JobId": 14874,
+ "Key": "isCollectDriverInventory",
+ "Value": "true"
+ }
+ ],
+ "LastRunStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2060,
+ "Name": "Completed"
+ },
+ "JobType": {
+ "@odata.type": "#JobService.JobType",
+ "Id": 8,
+ "Name": "Inventory_Task",
+ },
+ }
+ ]
+ },
+ 'message': CHANGES_MSG, "success": True, 'mparams': {
+ "device_service_tags": ["ABCTAG1", "BCDTAG2"], "job_wait": False
+ }, "get_dev_ids": ([13216], {}), "check_mode": True},
+ {"json_data": {
+ "value": [
+ {
+ "Id": 14874,
+ "JobName": "Refresh inventory",
+ "JobDescription": JOB_DESC.format("Refresh inventory", "13216"),
+ "Schedule": "startnow",
+ "State": "Enabled",
+ "Targets": [
+ {
+ "JobId": 14874,
+ "Id": 13216,
+ "Data": "",
+ "TargetType": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ],
+ "Params": [
+ {
+ "JobId": 14874,
+ "Key": "action",
+ "Value": "CONFIG_INVENTORY"
+ },
+ {
+ "JobId": 14874,
+ "Key": "isCollectDriverInventory",
+ "Value": "true"
+ }
+ ],
+ "LastRunStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2050,
+ "Name": "Completed"
+ },
+ "JobType": {
+ "@odata.type": "#JobService.JobType",
+ "Id": 8,
+ "Name": "Inventory_Task",
+ },
+ }
+ ]},
+ 'message': NO_CHANGES_MSG, "success": True, 'mparams': {
+ "device_service_tags": ["ABCTAG1", "BCDTAG2"], "job_wait": False
+ }, "get_dev_ids": ([13216], {})},
+ {"json_data": {
+ "value": [
+ {
+ "Id": 14874,
+ "JobName": "Reset iDRAC",
+ "JobDescription": JOB_DESC.format("Reset iDRAC", "13216"),
+ "Schedule": "startnow",
+ "State": "Enabled",
+ "Targets": [
+ {
+ "JobId": 14874,
+ "Id": 13216,
+ "Data": "",
+ "TargetType": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ],
+ "Params": [
+ {
+ "JobId": 14874,
+ "Key": "operationName",
+ "Value": "RESET_IDRAC"
+ }
+ ],
+ "LastRunStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2050,
+ "Name": "Completed"
+ },
+ "JobType": {
+ "@odata.type": "#JobService.JobType",
+ "Id": 3,
+ "Name": "DeviceAction_Task",
+ },
+ }
+ ]},
+ 'message': NO_CHANGES_MSG, "success": True, 'mparams': {
+ "device_service_tags": ["ABCTAG1", "BCDTAG2"],
+ "job_wait": False, "device_action": 'reset_idrac',
+ }, "get_dev_ids": ([13216], {})},
+ {"json_data": {
+ "value": [
+ {
+ "Id": 14874,
+ "JobName": "Clear iDRAC job queue",
+ "JobDescription": JOB_DESC.format("Clear iDRAC job queue", "13216"),
+ "Schedule": "startnow",
+ "State": "Enabled",
+ "Targets": [
+ {
+ "JobId": 14874,
+ "Id": 13216,
+ "Data": "",
+ "TargetType": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ],
+ "Params": [
+ {
+ "JobId": 14874,
+ "Key": "deviceTypes",
+ "Value": "1000"
+ },
+ {
+ "JobId": 14874,
+ "Key": "operationName",
+ "Value": "REMOTE_RACADM_EXEC"
+ },
+ {
+ "JobId": 14874,
+ "Key": "Command",
+ "Value": "jobqueue delete -i JID_CLEARALL_FORCE"
+ },
+ {
+ "JobId": 14874,
+ "Key": "CommandTimeout",
+ "Value": "60"
+ }
+ ],
+ "LastRunStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2050,
+ "Name": "Completed"
+ },
+ "JobType": {
+ "@odata.type": "#JobService.JobType",
+ "Id": 3,
+ "Name": "DeviceAction_Task",
+ },
+ }
+ ]},
+ 'message': NO_CHANGES_MSG, "success": True, 'mparams': {
+ "device_service_tags": ["ABCTAG1", "BCDTAG2"],
+ "job_wait": False, "device_action": 'clear_idrac_job_queue',
+ }, "get_dev_ids": ([13216], {})},
+ {"json_data": {"Id": 14874, "LastRunStatus": {"Id": 2060, "Name": "Completed"}},
+ 'message': SUCCESS_MSG, "success": True, 'mparams': {
+ "device_service_tags": ["ABCTAG1", "BCDTAG2"], "job_wait": True
+ }, "check_similar_job": {}, "get_dev_ids": ([13216], {})},
+ {"json_data": {"Id": 14874, "LastRunStatus": {"Id": 2070, "Name": "Completed"},
+ "Value": "Job Tracking has failed"},
+ 'message': "Job Tracking has failed", "success": True, 'mparams': {
+ "device_service_tags": ["ABCTAG1", "BCDTAG2"], "job_wait": True
+ }, "check_similar_job": {}, "get_dev_ids": ([13216], {})}
+ ])
+ def test_ome_devices_main_state_present(self, params, ome_connection_mock_for_devices, ome_response_mock,
+ ome_default_args, module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ mocks = ["check_similar_job", "get_dev_ids"]
+ for m in mocks:
+ if m in params:
+ mocker.patch(MODULE_PATH + m, return_value=params.get(m, {}))
+ mocker.patch("ansible_collections.dellemc.openmanage.plugins.module_utils.utils." + 'time.sleep', return_value=None)
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_devices_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_devices, ome_response_mock):
+ ome_default_args.update({"state": "absent", "device_service_tags": "t1"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'get_dev_ids', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'get_dev_ids', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'get_dev_ids',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_diagnostics.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_diagnostics.py
new file mode 100644
index 00000000..79c94b5c
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_diagnostics.py
@@ -0,0 +1,300 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.3.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import pytest
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_diagnostics
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_diagnostics.'
+
+
+@pytest.fixture
+def ome_conn_mock_diagnostics(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOMEDiagnostics(FakeAnsibleModule):
+
+ module = ome_diagnostics
+
+ def test_check_domain_service(self, ome_conn_mock_diagnostics, ome_default_args, mocker):
+ f_module = self.get_module_mock()
+ result = self.module.check_domain_service(f_module, ome_conn_mock_diagnostics)
+ assert result is None
+
+ def test_group_validation(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"device_group_name": "Servers"})
+ ome_response_mock.json_data = {"value": []}
+ with pytest.raises(Exception) as err:
+ self.module.group_validation(f_module, ome_conn_mock_diagnostics)
+ assert err.value.args[0] == "Unable to complete the operation because the entered target device " \
+ "group name 'Servers' is invalid."
+ ome_response_mock.json_data = {"value": [{"Id": 25011, "Type": 1000}]}
+ result = self.module.group_validation(f_module, ome_conn_mock_diagnostics)
+ assert result == [25011]
+
+ def test_group_validation_s1(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"device_group_name": "Servers"})
+ ome_response_mock.json_data = {"value": [{"Type": 2000, "Id": 10161}]}
+ with pytest.raises(Exception) as err:
+ self.module.group_validation(f_module, ome_conn_mock_diagnostics)
+ assert err.value.args[0] == "The requested group 'Servers' does not contain devices that support export log."
+
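+    # device_validation should reject device ids or service tags that are absent from the report
+    # list or belong to unsupported device types, and return the matching device ids otherwise.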
+ def test_device_validation(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
+ resp = {"report_list": [{"Id": 25014, "DeviceServiceTag": "ZXCVB1", "Type": 1000}]}
+ f_module = self.get_module_mock(params={"device_ids": [25011]})
+ ome_conn_mock_diagnostics.get_all_report_details.return_value = resp
+ with pytest.raises(Exception) as err:
+ self.module.device_validation(f_module, ome_conn_mock_diagnostics)
+ assert err.value.args[0] == "Unable to complete the operation because the entered target device " \
+ "id(s) '25011' are invalid."
+ resp = {"report_list": [{"Id": 25011, "DeviceServiceTag": "ZXCVB1", "Type": 1000}]}
+ ome_conn_mock_diagnostics.get_all_report_details.return_value = resp
+ result = self.module.device_validation(f_module, ome_conn_mock_diagnostics)
+ assert result == [25011]
+ f_module = self.get_module_mock(params={"device_service_tags": ["ZXCVB1"]})
+ result = self.module.device_validation(f_module, ome_conn_mock_diagnostics)
+ assert result == [25011]
+ resp = {"report_list": [{"Id": 25019, "DeviceServiceTag": "ZXCVB1", "Type": 8000}]}
+ ome_conn_mock_diagnostics.get_all_report_details.return_value = resp
+ with pytest.raises(Exception) as err:
+ self.module.device_validation(f_module, ome_conn_mock_diagnostics)
+ assert err.value.args[0] == "The requested device service tag(s) 'ZXCVB1' " \
+ "are not applicable for export log."
+
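+    # With lead_chassis_only, the application log export resolves the LEAD (or STANDALONE) chassis
+    # from the domain list; for support-assist collection an explicit device list is passed instead.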
+ def test_extract_log_operation(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"log_type": "application", "share_address": "192.168.0.1",
+ "share_type": "NFS", "share_name": "iso", "share_user": "username",
+ "share_password": "password", "share_domain": "domain",
+ "mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"],
+ "lead_chassis_only": "true"})
+ ome_response_mock.json_data = {"value": [{"Id": 16011, "Type": 2000}]}
+ ome_conn_mock_diagnostics.job_submission.return_value = {"Id": 16011}
+ ome_conn_mock_diagnostics.get_all_items_with_pagination.return_value = \
+ {"value": [{"DomainRoleTypeValue": "LEAD", "DeviceId": 16011}]}
+ result = self.module.extract_log_operation(f_module, ome_conn_mock_diagnostics)
+ assert result["Id"] == 16011
+
+ ome_conn_mock_diagnostics.get_all_items_with_pagination.return_value = \
+ {"value": [{"DomainRoleTypeValue": "STANDALONE", "DeviceId": 16011}]}
+ result = self.module.extract_log_operation(f_module, ome_conn_mock_diagnostics)
+ assert result["Id"] == 16011
+
+ f_module = self.get_module_mock(params={"log_type": "support_assist_collection", "share_address": "192.168.0.1",
+ "share_type": "NFS", "share_name": "iso", "share_user": "username",
+ "share_password": "password", "share_domain": "domain",
+ "mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"]})
+ result = self.module.extract_log_operation(f_module, ome_conn_mock_diagnostics, device_lst=[25012])
+ assert result["Id"] == 16011
+
+ def test_extract_log_operation_member(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"log_type": "application", "share_address": "192.168.0.1",
+ "share_type": "NFS", "share_name": "iso", "share_user": "username",
+ "share_password": "password", "share_domain": "domain",
+ "mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"],
+ "lead_chassis_only": "true"})
+ ome_response_mock.json_data = {"value": [{"Id": 16011, "Type": 2000}]}
+ ome_conn_mock_diagnostics.job_submission.return_value = {"Id": 16011}
+ ome_conn_mock_diagnostics.get_all_items_with_pagination.return_value = \
+ {"value": [{"DomainRoleTypeValue": "MEMBER", "DeviceId": 16011}]}
+ with pytest.raises(Exception) as err:
+ self.module.extract_log_operation(f_module, ome_conn_mock_diagnostics)
+ assert err.value.args[0] == "There is no device(s) available to export application log."
+
+ def test_extract_log_operation_no_lead_chassis(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"lead_chassis_only": False, "log_type": "application",
+ "share_address": "192.168.0.1",
+ "share_type": "NFS", "share_name": "iso", "share_user": "username",
+ "share_password": "password", "share_domain": "domain",
+ "mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"], })
+ ome_response_mock.json_data = {"value": [{"Id": 16011, "Type": 2000}]}
+ ome_conn_mock_diagnostics.job_submission.return_value = {"Id": 16011}
+ result = self.module.extract_log_operation(f_module, ome_conn_mock_diagnostics)
+ assert result["Id"] == 16011
+
+ def test_extract_log_operation_s1(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"lead_chassis_only": False, "log_type": "application",
+ "share_address": "192.168.0.1",
+ "share_type": "NFS",
+ "mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"], })
+ ome_response_mock.json_data = {"value": [{"Id": 16011, "Type": 2000}]}
+ ome_conn_mock_diagnostics.job_submission.return_value = {"Id": 16011}
+ result = self.module.extract_log_operation(f_module, ome_conn_mock_diagnostics)
+ assert result["Id"] == 16011
+
+    def test_main_success_case(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
+ ome_default_args.update({"log_type": "support_assist_collection", "share_address": "192.168.0.1",
+ "share_type": "NFS", "share_name": "iso", "share_user": "username",
+ "share_password": "password", "share_domain": "domain",
+ "mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"],
+ "test_connection": False, "job_wait": True, "device_ids": [25011]})
+ mocker.patch(MODULE_PATH + "check_domain_service", return_value=None)
+ mocker.patch(MODULE_PATH + "device_validation", return_value=[25011])
+ mocker.patch(MODULE_PATH + "find_failed_jobs", return_value=("", False))
+ ome_conn_mock_diagnostics.check_existing_job_state.return_value = (True, [25011])
+ mocker.patch(MODULE_PATH + "extract_log_operation")
+ ome_response_mock.json_data = {"value": {"Id": 25011}}
+ ome_conn_mock_diagnostics.job_tracking.return_value = (False, "")
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == "Export log job completed successfully."
+
+ ome_conn_mock_diagnostics.check_existing_job_state.return_value = (False, [25011])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "An export log job is already running. Wait for the job to finish."
+
+ ome_default_args.update({"test_connection": True, "job_wait": False})
+ ome_conn_mock_diagnostics.check_existing_job_state.return_value = (True, [25011])
+ ome_conn_mock_diagnostics.job_tracking.return_value = (True, "")
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "Unable to access the share. Ensure that the share address, share name, " \
+ "share domain, and share credentials provided are correct."
+
+    def test_main_success_case02(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
+ ome_default_args.update({"log_type": "supportassist_collection", "share_address": "192.168.0.1",
+ "share_type": "CIFS", "share_name": "iso", "share_user": "username",
+ "share_password": "password", "share_domain": "domain",
+ "mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"],
+ "test_connection": False, "job_wait": True, "device_ids": [25011]})
+ mocker.patch(MODULE_PATH + "check_domain_service", return_value=None)
+ mocker.patch(MODULE_PATH + "device_validation", return_value=[25011])
+ mocker.patch(MODULE_PATH + "find_failed_jobs", return_value=("", False))
+ ome_conn_mock_diagnostics.check_existing_job_state.return_value = (True, [25011])
+ mocker.patch(MODULE_PATH + "extract_log_operation")
+ ome_response_mock.json_data = {"value": {"Id": 25011}}
+ ome_conn_mock_diagnostics.job_tracking.return_value = (False, "")
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == "Export log job completed successfully."
+
+ ome_conn_mock_diagnostics.check_existing_job_state.return_value = (False, [25011])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "An export log job is already running. Wait for the job to finish."
+
+ ome_default_args.update({"test_connection": True, "job_wait": False})
+ ome_conn_mock_diagnostics.check_existing_job_state.return_value = (True, [25011])
+ ome_conn_mock_diagnostics.job_tracking.return_value = (True, "")
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "Unable to access the share. Ensure that the share address, share name, " \
+ "share domain, and share credentials provided are correct."
+
+    def test_main_success_case03(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
+ ome_default_args.update({"log_type": "application", "share_address": "192.168.0.1",
+ "share_type": "NFS", "share_name": "iso", "mask_sensitive_info": "true",
+ "test_connection": True, "job_wait": True, "device_ids": [25011]})
+ mocker.patch(MODULE_PATH + "check_domain_service", return_value=None)
+ mocker.patch(MODULE_PATH + "device_validation", return_value=[25011])
+ mocker.patch(MODULE_PATH + "find_failed_jobs", return_value=("", False))
+ ome_conn_mock_diagnostics.check_existing_job_state.return_value = (True, [25011])
+ mocker.patch(MODULE_PATH + "extract_log_operation")
+ ome_response_mock.json_data = {"value": {"Id": 25011}}
+ ome_conn_mock_diagnostics.job_tracking.return_value = (False, "")
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == "Export log job completed successfully."
+
+ ome_conn_mock_diagnostics.check_existing_job_state.return_value = (False, [25011])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "An export log job is already running. Wait for the job to finish."
+
+ ome_default_args.update({"test_connection": True, "job_wait": False})
+ ome_conn_mock_diagnostics.check_existing_job_state.return_value = (True, [25011])
+ ome_conn_mock_diagnostics.job_tracking.return_value = (True, "")
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "Unable to access the share. Ensure that the share address, share name, " \
+ "share domain, and share credentials provided are correct."
+
+    def test_main_success_case04(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
+ ome_default_args.update({"log_type": "supportassist_collection", "share_address": "192.168.0.1",
+ "share_type": "CIFS", "share_name": "iso", "share_user": "username",
+ "share_password": "password", "share_domain": "domain",
+ "mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"],
+ "test_connection": False, "job_wait": True, "device_group_name": "Servers"})
+ mocker.patch(MODULE_PATH + "check_domain_service", return_value=None)
+ mocker.patch(MODULE_PATH + "group_validation", return_value=[25011])
+ mocker.patch(MODULE_PATH + "find_failed_jobs", return_value=("", False))
+ ome_conn_mock_diagnostics.check_existing_job_state.return_value = (True, [25011])
+ mocker.patch(MODULE_PATH + "extract_log_operation")
+ ome_response_mock.json_data = {"value": {"Id": 25011}}
+ ome_conn_mock_diagnostics.job_tracking.return_value = (False, "")
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == "Export log job completed successfully."
+
+ ome_conn_mock_diagnostics.check_existing_job_state.return_value = (False, [25011])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "An export log job is already running. Wait for the job to finish."
+
+ ome_default_args.update({"test_connection": True, "job_wait": False})
+ ome_conn_mock_diagnostics.check_existing_job_state.return_value = (True, [25011])
+ ome_conn_mock_diagnostics.job_tracking.return_value = (True, "")
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "Unable to access the share. Ensure that the share address, share name, " \
+ "share domain, and share credentials provided are correct."
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_diagnostics_main_exception_case(self, exc_type, mocker, ome_default_args,
+ ome_conn_mock_diagnostics, ome_response_mock):
+ ome_default_args.update({"log_type": "application", "share_address": "192.168.0.1",
+ "share_type": "NFS", "mask_sensitive_info": False})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
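+        # Each exception type is injected into check_domain_service; the module must fail cleanly and report a message.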
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("url open error"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["failed"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
+
+ def test_find_failed_jobs(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
+ ome_response_mock.json_data = {
+ "Id": 25011,
+ "value": [{"Id": 25013, "Value": "Job status for JID_255809594125 is Completed with Errors."}]
+ }
+ result = self.module.find_failed_jobs({"Id": 25012}, ome_conn_mock_diagnostics)
+ assert result[0] == "Export log job completed with errors."
+ assert result[1] is False
+
+ ome_response_mock.json_data = {
+ "Id": 25011,
+ "value": []
+ }
+ result = self.module.find_failed_jobs({"Id": 25012}, ome_conn_mock_diagnostics)
+ assert result[0] == "Export log job completed with errors."
+ assert result[1] is False
+
+ ome_response_mock.json_data = {
+ "Id": 25011,
+ "value": [{"Id": 25013, "Value": "Job status for JID_255809594125 is Completed."}]
+ }
+ result = self.module.find_failed_jobs({"Id": 25012}, ome_conn_mock_diagnostics)
+ assert result[0] == "Export log job completed with errors."
+ assert result[1] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_discovery.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_discovery.py
new file mode 100644
index 00000000..e84e7c7e
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_discovery.py
@@ -0,0 +1,460 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 3.3.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import pytest
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_discovery
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_discovery.'
+NO_CHANGES_MSG = "No changes found to be applied."
+DISC_JOB_RUNNING = "Discovery job '{name}' with ID {id} is running. Please retry after job completion."
+DISC_DEL_JOBS_SUCCESS = "Successfully deleted {n} discovery job(s)."
+MULTI_DISCOVERY = "Multiple discoveries present. Run the job again using a specific ID."
+DISCOVERY_SCHEDULED = "Successfully scheduled the Discovery job."
+DISCOVER_JOB_COMPLETE = "Successfully completed the Discovery job."
+JOB_TRACK_SUCCESS = "Discovery job has {0}."
+JOB_TRACK_FAIL = "No devices discovered, job is in {0} state."
+JOB_TRACK_UNABLE = "Unable to track discovery job status of {0}."
+JOB_TRACK_INCOMPLETE = "Discovery job {0} incomplete after polling {1} times."
+INVALID_DEVICES = "Invalid device types found - {0}."
+
+
+@pytest.fixture
+def ome_connection_mock_for_discovery(mocker, ome_response_mock):
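+    # Patch RestOME inside ome_discovery so every invoke_request call returns the shared ome_response_mock fixture.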
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeDiscovery(FakeAnsibleModule):
+ module = ome_discovery
+
+ @pytest.mark.parametrize("params", [{"mparams": {"state": "absent", "discovery_job_name": "my_discovery1"},
+ "discov_list": [{"DiscoveryConfigGroupId": 12,
+ "DiscoveryConfigGroupName": "my_discovery1"}],
+ "job_state_dict": {12: 2010}, "res": DISC_DEL_JOBS_SUCCESS.format(n=1),
+ "json_data": 1, "success": True},
+ {"mparams": {"state": "absent", "discovery_job_name": "my_discovery1"},
+ "discov_list": [{"DiscoveryConfigGroupId": 12,
+ "DiscoveryConfigGroupName": "my_discovery1"}],
+ "job_state_dict": {12: 2050},
+ "res": DISC_JOB_RUNNING.format(name='my_discovery1', id=12), "json_data": 1,
+ "success": True}])
+ def test_delete_discovery(self, mocker, params, ome_connection_mock_for_discovery, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + 'get_discovery_states', return_value=params["job_state_dict"])
+ f_module = self.get_module_mock(params=params["mparams"])
+ error_message = params["res"]
+ with pytest.raises(Exception) as err:
+ self.module.delete_discovery(f_module, ome_connection_mock_for_discovery, params['discov_list'])
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize("params", [{"mparams": {"state": "absent", "discovery_job_name": "my_discovery1"},
+ "res": [{"DiscoveryConfigGroupId": 12,
+ "DiscoveryConfigGroupName": "my_discovery1"}],
+ "json_data": {"value": [{"DiscoveryConfigGroupId": 12,
+ "DiscoveryConfigGroupName": "my_discovery1"}]},
+ "success": True},
+ {"mparams": {"state": "absent", "discovery_id": 12}, "res": [
+ {"DiscoveryConfigGroupId": 12,
+ "DiscoveryConfigGroupName": "my_discovery1"}],
+ "json_data": {"value": [{"DiscoveryConfigGroupId": 11,
+ "DiscoveryConfigGroupName": "my_discovery2"},
+ {"DiscoveryConfigGroupId": 12,
+ "DiscoveryConfigGroupName": "my_discovery1"}]},
+ "success": True}])
+ def test_check_existing_discovery(self, mocker, params, ome_connection_mock_for_discovery, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params["mparams"])
+ res = self.module.check_existing_discovery(f_module, ome_connection_mock_for_discovery)
+ assert res == params["res"]
+
+ @pytest.mark.parametrize("params", [
+ {"res": {12: 2020}, "json_data": {"value": [{"DiscoveryConfigGroupId": 12, "JobStatusId": 2020}]},
+ "success": True}])
+ def test_get_discovery_states(self, params, ome_connection_mock_for_discovery, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ res = self.module.get_discovery_states(ome_connection_mock_for_discovery)
+ assert res == params["res"]
+
+ @pytest.mark.parametrize("params", [{"mparams": {"schedule": 'RunNow'},
+ 'schedule_payload': {"RunNow": True, "RunLater": False, 'Cron': "startnow"}},
+ {"mparams": {"schedule": 'RunLater', 'cron': "1 2 3 4 5 *"},
+ 'schedule_payload': {"RunNow": False, "RunLater": True,
+ 'Cron': "1 2 3 4 5 *"}}, ])
+ def test_get_schedule(self, params):
+ f_module = self.get_module_mock(params=params["mparams"])
+ res = self.module.get_schedule(f_module)
+ assert res == params['schedule_payload']
+
+ @pytest.mark.parametrize("params", [{"json_data": {
+ "value": [{"ProtocolName": "SNMP", "DeviceTypeId": 1000, "DeviceTypeName": "SERVER"},
+ {"ProtocolName": "SNMP", "DeviceTypeId": 5000, "DeviceTypeName": "DELL STORAGE"},
+ {"ProtocolName": "SNMP", "DeviceTypeId": 7000, "DeviceTypeName": "NETWORK SWITCH"},
+ {"ProtocolName": "WSMAN", "DeviceTypeId": 1000, "DeviceTypeName": "SERVER"},
+ {"ProtocolName": "WSMAN", "DeviceTypeId": 2000, "DeviceTypeName": "CHASSIS"},
+ {"ProtocolName": "REDFISH", "DeviceTypeId": 1000, "DeviceTypeName": "SERVER"},
+ {"ProtocolName": "REDFISH", "DeviceTypeId": 2000, "DeviceTypeName": "CHASSIS", },
+ {"ProtocolName": "IPMI", "DeviceTypeId": 1000, "DeviceTypeName": "SERVER"},
+ {"ProtocolName": "SSH", "DeviceTypeId": 1000, "DeviceTypeName": "SERVER"},
+ {"ProtocolName": "VMWARE", "DeviceTypeId": 1000, "DeviceTypeName": "SERVER"},
+ {"ProtocolName": "STORAGE", "DeviceTypeId": 5000, "DeviceTypeName": "DELL STORAGE"}]},
+ "dev_id_map": {"CHASSIS": 2000, "DELL STORAGE": 5000, "NETWORK SWITCH": 7000, "SERVER": 1000, "STORAGE": 5000},
+ "proto_dev_map": {"CHASSIS": ["WSMAN", "REDFISH"], "DELL STORAGE": ["SNMP", "STORAGE"],
+ "NETWORK SWITCH": ["SNMP"],
+ "STORAGE": ["SNMP", "STORAGE"],
+ "SERVER": ["SNMP", "WSMAN", "REDFISH", "IPMI", "SSH", "VMWARE"]}}])
+ def test_get_protocol_device_map(self, params, ome_connection_mock_for_discovery, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ prot_dev_map, dev_id_map = self.module.get_protocol_device_map(ome_connection_mock_for_discovery)
+ assert prot_dev_map == params['proto_dev_map']
+ assert dev_id_map == params['dev_id_map']
+
+ @pytest.mark.parametrize("params", [{
+ "mparams": {"discovery_job_name": 'd1', 'trap_destination': True, 'community_string': True,
+ 'email_recipient': 'abc@email.com', 'description': "d1_desc"},
+ 'other_dict': {"DiscoveryConfigGroupName": 'd1', "TrapDestination": True, 'CommunityString': True,
+ 'DiscoveryStatusEmailRecipient': 'abc@email.com'}}])
+ def test_get_other_discovery_payload(self, params):
+ f_module = self.get_module_mock(params=params["mparams"])
+ res = self.module.get_other_discovery_payload(f_module)
+ assert res == params['other_dict']
+
+ @pytest.mark.parametrize("params", [{"json_data": {"value": [{"Id": 1, "StartTime": "2021-04-19 04:54:18.427"},
+ {"Id": 2, "StartTime": "2021-04-19 04:55:18.427"}]},
+ "ips": {"Failed": ["192.168.1.2"], "Completed": ["192.168.1.3"]},
+ "pag_ret_val": {
+ "value": [{"Key": "192.168.1.2", "JobStatus": {"Name": "Failed"}},
+ {"Key": "192.168.1.3", "JobStatus": {"Name": "Completed"}}]}}])
+ def test_get_execution_details(self, params, ome_connection_mock_for_discovery, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ ome_connection_mock_for_discovery.get_all_items_with_pagination.return_value = params['pag_ret_val']
+ f_module = self.get_module_mock()
+ ips = self.module.get_execution_details(f_module, ome_connection_mock_for_discovery, 1)
+ assert ips == params['ips']
+
+ @pytest.mark.parametrize("params", [{"json_data": {'JobStatusId': 2060}, 'job_wait_sec': 60, 'job_failed': False,
+ "msg": JOB_TRACK_SUCCESS.format('completed successfully')},
+ {"json_data": {'JobStatusId': 2070}, 'job_wait_sec': 60, 'job_failed': True,
+ "msg": JOB_TRACK_FAIL.format('Failed')},
+ {"json_data": {'JobStatusId': 2050}, 'job_wait_sec': 60, 'job_failed': True,
+ "msg": JOB_TRACK_INCOMPLETE.format(1, 2)}, ])
+ def test_discovery_job_tracking(self, params, mocker, ome_connection_mock_for_discovery, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
+ job_failed, msg = self.module.discovery_job_tracking(ome_connection_mock_for_discovery, 1,
+ params['job_wait_sec'])
+ assert job_failed == params['job_failed']
+ assert msg == params['msg']
+
+ @pytest.mark.parametrize("params", [{"discovery_json": {'DiscoveryConfigTaskParam': [{'TaskId': 12}]},
+ 'job_id': 12, "json_data": {"value": [{"Id": 1}, {"Id": 2}]}},
+ {"discovery_json": {'DiscoveryConfigGroupId': 123,
+ 'DiscoveryConfigTaskParam': [{'TaskId': 12},
+ {'TaskId': 23}]},
+ 'job_id': 12, "json_data": {"value": [{'DiscoveryConfigGroupId': 234,
+ "JobId": 2},
+ {'DiscoveryConfigGroupId': 123,
+ "JobId": 12}, ]}}])
+ def test_get_job_data(self, params, ome_connection_mock_for_discovery, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ job_id = self.module.get_job_data(params['discovery_json'], ome_connection_mock_for_discovery)
+ assert job_id == params['job_id']
+
+ @pytest.mark.parametrize("params", [{"disc_config": {
+ "ipmi": {"kgkey": None, "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "retries": 3, "timeout": 60,
+ "username": "root"},
+ "wsman": {"password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "retries": 3, "timeout": 60, "username": "root"}},
+ 'conn_profile': {"credentials": [{"authType": "Basic", "credentials": {"kgkey": None,
+ "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
+ "retries": 3, "timeout": 60,
+ "username": "root"}, "modified": False,
+ "type": "IPMI"}], "profileDescription": "", "profileId": 0, "profileName": "",
+ "type": "DISCOVERY"}}])
+ def test_get_connection_profile(self, params):
+ conn_profile = self.module.get_connection_profile(params['disc_config'])
+ assert conn_profile['type'] == params['conn_profile']['type']
+
+ @pytest.mark.parametrize("params", [{"disc_cfg_list": [{
+ "ConnectionProfile": "{\"profileDescription\": \"\", \"profileId\": 0, \"type\": \"DISCOVERY\", \"credentials\""
+ ": [{\"credentials\": {\"retries\": 3, \"community\": \"public\", \"timeout\": 3, \"port\""
+ ": 161}, \"authType\": \"Basic\", \"type\": \"SNMP\", \"modified\": False}], "
+ "\"profileName\": \"\"}", "DeviceType": [1000],
+ "DiscoveryConfigTargets": [{"NetworkAddressDetail": "196.168.24.17"}]}],
+ "get_conn_json": {"profileId": 0, "profileName": "", "profileDescription": "", "type": "DISCOVERY",
+ 'credentials': [{'authType': 'Basic',
+ 'credentials': {'community': 'public', 'port': 161, 'retries': 3,
+ 'timeout': 3}, 'id': 116, 'modified': False,
+ 'type': 'SNMP'}]}, "DeviceType": [1000],
+ "DiscoveryConfigTargets": [{"NetworkAddressDetail": "196.168.24.17"}], 'mparams': {'discovery_config_targets': [
+ {"device_types": ["SERVER"], "network_address_detail": ["196.168.24.17"],
+ "snmp": {"community": "public", "port": 161, "retries": 3, "timeout": 3}}]}}])
+    def test_get_discovery_config(self, params, mocker, ome_connection_mock_for_discovery):
+ dev_id_map = {"CHASSIS": 2000, "DELL STORAGE": 5000, "NETWORK SWITCH": 7000, "SERVER": 1000, "STORAGE": 5000}
+ proto_dev_map = {"CHASSIS": ["WSMAN", "REDFISH"], "DELL STORAGE": ["SNMP", "STORAGE"],
+ "NETWORK SWITCH": ["SNMP"], "SERVER": ["SNMP", "WSMAN", "REDFISH", "IPMI", "SSH", "VMWARE"]}
+ f_module = self.get_module_mock(params=params['mparams'])
+ mocker.patch(MODULE_PATH + 'get_protocol_device_map', return_value=(proto_dev_map, dev_id_map))
+ mocker.patch(MODULE_PATH + 'get_connection_profile', return_value=params['get_conn_json'])
+ disc_cfg_list = self.module.get_discovery_config(f_module, ome_connection_mock_for_discovery)
+ assert disc_cfg_list[0]['DeviceType'] == params['DeviceType']
+        assert disc_cfg_list[0]['DiscoveryConfigTargets'] == params['DiscoveryConfigTargets']
+        # assert disc_cfg_list == params['disc_cfg_list']
+
+ @pytest.mark.parametrize("params", [{"json_data": {"@odata.type": "#DiscoveryConfigService.DiscoveryJob",
+ "@odata.id": "/api/DiscoveryConfigService/Jobs(12617)",
+ "JobId": 12617, "JobName": "D1", "JobSchedule": "startnow",
+ "DiscoveryConfigExpectedDeviceCount": 713,
+ "DiscoveryConfigDiscoveredDeviceCount": 0,
+ "DiscoveryConfigEmailRecipient": "jag@dell.com", },
+ "djob": {"JobId": 12617, "JobName": "D1", "JobSchedule": "startnow",
+ "DiscoveryConfigExpectedDeviceCount": 713,
+ "DiscoveryConfigDiscoveredDeviceCount": 0,
+ "DiscoveryConfigEmailRecipient": "jag@dell.com", }}])
+ def test_get_discovery_job(self, params, ome_connection_mock_for_discovery, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ djob = self.module.get_discovery_job(ome_connection_mock_for_discovery, 12)
+ assert djob == params['djob']
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"DiscoveryConfigGroupName": 'd1'}, 'job_failed': False, 'job_message': DISCOVER_JOB_COMPLETE,
+ 'mparams': {'job_wait': True, 'schedule': 'RunNow', 'job_wait_timeout': 1000}},
+ {"json_data": {"DiscoveryConfigGroupName": 'd1'}, 'job_failed': True, 'job_message': JOB_TRACK_FAIL,
+ 'mparams': {'job_wait': True, 'schedule': 'RunNow', 'job_wait_timeout': 1000}},
+ {"json_data": {"DiscoveryConfigGroupName": 'd1'}, 'job_failed': True, 'job_message': DISCOVERY_SCHEDULED,
+ 'mparams': {'job_wait': False, 'schedule': 'RunLater', 'job_wait_timeout': 1000}}])
+ def test_create_discovery(self, params, mocker, ome_connection_mock_for_discovery, ome_response_mock):
+ mocker.patch(MODULE_PATH + 'get_discovery_config', return_value={})
+ mocker.patch(MODULE_PATH + 'get_schedule', return_value={})
+ mocker.patch(MODULE_PATH + 'get_other_discovery_payload', return_value={})
+ mocker.patch(MODULE_PATH + 'get_job_data', return_value=12)
+ mocker.patch(MODULE_PATH + 'get_execution_details', return_value={})
+ mocker.patch(MODULE_PATH + 'get_discovery_job', return_value={})
+ mocker.patch(MODULE_PATH + 'discovery_job_tracking', return_value=(params['job_failed'], params['job_message']))
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params['mparams'])
+ error_message = params["job_message"]
+ with pytest.raises(Exception) as err:
+ self.module.create_discovery(f_module, ome_connection_mock_for_discovery)
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_discovery_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_discovery, ome_response_mock):
+ ome_default_args.update({"state": "absent", "discovery_job_name": "t1"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
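+        # URLError is reported as an unreachable host, while the remaining exception types surface through fail_json.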
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'check_existing_discovery', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'check_existing_discovery', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'check_existing_discovery',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
+
+ @pytest.mark.parametrize(
+ "params", [{"json_data": {"DiscoveryConfigGroupName": 'd1'},
+ 'job_failed': False, 'job_message': DISCOVER_JOB_COMPLETE,
+ 'mparams': {'job_wait': True, 'schedule': 'RunNow', 'job_wait_timeout': 1000}},
+ {"json_data": {"DiscoveryConfigGroupName": 'd1'}, 'job_failed': True,
+ 'job_message': JOB_TRACK_FAIL,
+ 'mparams': {'job_wait': True, 'schedule': 'RunNow', 'job_wait_timeout': 1000}},
+ {"json_data": {"DiscoveryConfigGroupName": 'd1'}, 'job_failed': True,
+ 'job_message': DISCOVERY_SCHEDULED,
+ 'mparams': {'job_wait': False, 'schedule': 'RunLater', 'job_wait_timeout': 1000}}])
+ def test_modify_discovery(self, params, mocker, ome_connection_mock_for_discovery, ome_response_mock):
+ discov_list = [{"DiscoveryConfigGroupId": 12, "DiscoveryConfigGroupName": "my_discovery1"}]
+ f_module = self.get_module_mock(params=params['mparams'])
+ mocker.patch(MODULE_PATH + 'get_other_discovery_payload', return_value={"DiscoveryConfigGroupId": 10})
+ mocker.patch(MODULE_PATH + 'update_modify_payload', return_value=None)
+ mocker.patch(MODULE_PATH + 'get_job_data', return_value=12)
+ mocker.patch(MODULE_PATH + 'get_execution_details', return_value={})
+ mocker.patch(MODULE_PATH + 'get_discovery_job', return_value={})
+ mocker.patch(MODULE_PATH + 'get_discovery_config', return_value={})
+ mocker.patch(MODULE_PATH + 'get_discovery_states', return_value={12: 15})
+ mocker.patch(MODULE_PATH + 'discovery_job_tracking', return_value=(params['job_failed'], params['job_message']))
+ error_message = params["job_message"]
+ with pytest.raises(Exception) as err:
+ self.module.modify_discovery(f_module, ome_connection_mock_for_discovery, discov_list)
+ assert err.value.args[0] == error_message
+
+ def test_modify_discovery_failure_case01(self, ome_connection_mock_for_discovery):
+ multi_disc_msg = MULTI_DISCOVERY
+ f_module = self.get_module_mock(params={'job_wait': True, 'schedule': 'RunNow', 'job_wait_timeout': 1000})
+ with pytest.raises(Exception) as err:
+ self.module.modify_discovery(f_module, ome_connection_mock_for_discovery,
+ [{"DiscoveryConfigGroupId": 1, "DiscoveryConfigGroupName": "my_discovery1"},
+ {"DiscoveryConfigGroupId": 2, "DiscoveryConfigGroupName": "my_discovery2"}])
+ assert err.value.args[0] == multi_disc_msg
+
+ def test_modify_discovery_failure_case2(self, mocker, ome_connection_mock_for_discovery):
+ f_module = self.get_module_mock(params={'job_wait': True, 'schedule': 'RunNow', 'job_wait_timeout': 1000})
+ job_run_msg = DISC_JOB_RUNNING.format(name='my_discovery1', id=12)
+ mocker.patch(MODULE_PATH + 'get_discovery_states', return_value={12: 2050})
+ with pytest.raises(Exception) as err:
+ self.module.modify_discovery(f_module, ome_connection_mock_for_discovery, [
+ {"DiscoveryConfigGroupId": 12, "DiscoveryConfigGroupName": "my_discovery1"}])
+ assert err.value.args[0] == job_run_msg
+
+ def test_update_modify_payload(self):
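+        # current_payload mirrors an existing discovery configuration; the modify payload supplies only the fields being changed.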
+ current_payload = {
+ "DiscoveryConfigGroupId": 21,
+ "DiscoveryConfigGroupName": "Discoverystorage",
+ "DiscoveryStatusEmailRecipient": None,
+ "DiscoveryConfigModels": [
+ {
+ "DiscoveryConfigId": 41,
+ "DiscoveryConfigStatus": None,
+ "DiscoveryConfigTargets": [
+ {
+ "DiscoveryConfigTargetId": 41,
+ "NetworkAddressDetail": "mock_network_address",
+ "SubnetMask": None,
+ "AddressType": 1,
+ "Disabled": False,
+ "Exclude": False
+ }
+ ],
+ "ConnectionProfileId": 21341,
+ "ConnectionProfile": "{\n \"profileId\" : 21341,\n \"profileName\" : \"\","
+ "\n \"profileDescription\" : \"\",\n \"type\" : \"DISCOVERY\","
+ "\n \"updatedBy\" : null,\n \"updateTime\" : 1617952521213,"
+ "\n \"credentials\" : [ {\n \"type\" : \"STORAGE\",\n \"authType\" : "
+ "\"Basic\",\n \"modified\" : false,\n \"id\" : 44,"
+ "\n \"credentials\" : {\n \"username\" : \"root\","
+ "\n \"password\" : null,\n \"domain\" : null,\n \"caCheck\" : "
+ "false,\n \"cnCheck\" : false,\n \"certificateData\" : null,"
+ "\n \"certificateDetail\" : null,\n \"port\" : 443,"
+ "\n \"retries\" : 3,\n \"timeout\" : 60,\n \"isHttp\" : "
+ "false,\n \"keepAlive\" : true,\n \"version\" : null\n }\n } "
+ "]\n}",
+ "DeviceType": [
+ 5000
+ ]
+ }
+ ],
+ "Schedule": {
+ "RunNow": False,
+ "RunLater": False,
+ "Recurring": None,
+ "Cron": "startnow",
+ "StartTime": None,
+ "EndTime": None
+ },
+ "TrapDestination": False,
+ "CommunityString": False,
+ "UseAllProfiles": False,
+ "CreateGroup": True
+ }
+ discovery_modify_payload = {
+ "DiscoveryConfigGroupName": "name1"
+ }
+ self.module.update_modify_payload(discovery_modify_payload, current_payload, new_name="name2")
+ assert discovery_modify_payload["DiscoveryConfigGroupName"] == "name2"
+ assert discovery_modify_payload["Schedule"]["RunNow"] is True
+ assert discovery_modify_payload["Schedule"]["RunLater"] is False
+ assert discovery_modify_payload["Schedule"]["Cron"] == "startnow"
+
+ def test_update_modify_payload_case2(self):
+ current_payload = {
+ "DiscoveryConfigGroupId": 21,
+ "DiscoveryConfigGroupName": "Discoverystorage",
+ "DiscoveryStatusEmailRecipient": None,
+ "DiscoveryConfigModels": [
+ {
+ "DiscoveryConfigId": 41,
+ "DiscoveryConfigStatus": None,
+ "DiscoveryConfigTargets": [
+ {
+ "DiscoveryConfigTargetId": 41,
+ "NetworkAddressDetail": "mock_network_address",
+ "SubnetMask": None,
+ "AddressType": 1,
+ "Disabled": False,
+ "Exclude": False
+ }
+ ],
+ "ConnectionProfileId": 21341,
+ "ConnectionProfile": "{\n \"profileId\" : 21341,\n \"profileName\" : \"\","
+ "\n \"profileDescription\" : \"\",\n \"type\" : \"DISCOVERY\","
+ "\n \"updatedBy\" : null,\n \"updateTime\" : 1617952521213,"
+ "\n \"credentials\" : [ {\n \"type\" : \"STORAGE\",\n \"authType\" : "
+ "\"Basic\",\n \"modified\" : false,\n \"id\" : 44,"
+ "\n \"credentials\" : {\n \"username\" : \"root\","
+ "\n \"password\" : null,\n \"domain\" : null,\n \"caCheck\" : "
+ "false,\n \"cnCheck\" : false,\n \"certificateData\" : null,"
+ "\n \"certificateDetail\" : null,\n \"port\" : 443,"
+ "\n \"retries\" : 3,\n \"timeout\" : 60,\n \"isHttp\" : "
+ "false,\n \"keepAlive\" : true,\n \"version\" : null\n }\n } "
+ "]\n}",
+ "DeviceType": [
+ 5000
+ ]
+ }
+ ],
+ "Schedule": {
+ "RunNow": False,
+ "RunLater": False,
+ "Recurring": None,
+ "Cron": "startnow",
+ "StartTime": None,
+ "EndTime": None
+ },
+ "TrapDestination": False,
+ "CommunityString": False,
+ "UseAllProfiles": False,
+ "CreateGroup": True
+ }
+ discovery_modify_payload = {
+ "DiscoveryConfigGroupName": "name1",
+ "TrapDestination": True,
+ "CommunityString": True,
+ "Schedule": {
+ "Cron": "startlater",
+ "RunNow": False,
+
+ }
+ }
+ self.module.update_modify_payload(discovery_modify_payload, current_payload)
+ assert discovery_modify_payload["DiscoveryConfigGroupName"] == "name1"
+ assert discovery_modify_payload["TrapDestination"] is True
+ assert discovery_modify_payload["CommunityString"] is True
+ assert discovery_modify_payload["Schedule"]["Cron"] == "startlater"
+ assert discovery_modify_payload["Schedule"]["RunNow"] is False
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_domain_user_groups.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_domain_user_groups.py
new file mode 100644
index 00000000..c931ed82
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_domain_user_groups.py
@@ -0,0 +1,198 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 4.0.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import pytest
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_domain_user_groups
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_domain_user_groups.'
+NO_CHANGES_MSG = "No changes found to be applied."
+CHANGES_FOUND = "Changes found to be applied."
+
+
+@pytest.fixture
+def ome_conn_mock_ad(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOMEADUser(FakeAnsibleModule):
+
+ module = ome_domain_user_groups
+
+ def test_get_directory_user(self, ome_conn_mock_ad, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"state": "absent", "group_name": "Administrator"})
+ ome_response_mock.json_data = {"value": [{"UserName": "Administrator", "RoleId": "10", "UserTypeId": 2}]}
+ result = self.module.get_directory_user(f_module, ome_conn_mock_ad)
+ assert result["UserName"] == "Administrator"
+
+ f_module = self.get_module_mock(params={"state": "absent"})
+ ome_response_mock.json_data = {"value": [{"UserName": "Administrator", "RoleId": "10", "UserTypeId": 2}]}
+ with pytest.raises(Exception) as err:
+ self.module.get_directory_user(f_module, ome_conn_mock_ad)
+ assert err.value.args[0] == "missing required arguments: group_name"
+
+ f_module = self.get_module_mock(params={"state": "absent", "group_name": "Administrator"})
+ f_module.check_mode = True
+ ome_response_mock.json_data = {"value": [{"UserName": "Administrator", "RoleId": "10", "UserTypeId": 2}]}
+ with pytest.raises(Exception) as err:
+ self.module.get_directory_user(f_module, ome_conn_mock_ad)
+ assert err.value.args[0] == "Changes found to be applied."
+
+ f_module = self.get_module_mock(params={"state": "absent", "group_name": "Administrator"})
+ f_module.check_mode = True
+ ome_response_mock.json_data = {"value": []}
+ with pytest.raises(Exception) as err:
+ self.module.get_directory_user(f_module, ome_conn_mock_ad)
+ assert err.value.args[0] == "No changes found to be applied."
+
+ f_module = self.get_module_mock(params={"state": "absent", "group_name": "Administrator"})
+ ome_response_mock.json_data = {"value": []}
+ with pytest.raises(Exception) as err:
+ self.module.get_directory_user(f_module, ome_conn_mock_ad)
+ assert err.value.args[0] == NO_CHANGES_MSG
+
+ def test_delete_directory_user(self, ome_conn_mock_ad, ome_response_mock, ome_default_args, mocker):
+ ome_response_mock.status_code = 204
+ msg, changed = self.module.delete_directory_user(ome_conn_mock_ad, 15011)
+ assert msg == "Successfully deleted the active directory user group."
+ assert changed is True
+
+ def test_get_role(self, ome_conn_mock_ad, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"state": "present", "group_name": "Administrator",
+ "role": "Administrator"})
+ ome_response_mock.json_data = {"value": [{"Name": "ADMINISTRATOR", "Id": 10}]}
+ result = self.module.get_role(f_module, ome_conn_mock_ad)
+ assert result == 10
+
+ f_module = self.get_module_mock(params={"state": "present", "group_name": "Administrator",
+ "role": "Administrator"})
+ ome_response_mock.json_data = {"value": [{"Name": "ADMIN", "Id": 10}]}
+ with pytest.raises(Exception) as err:
+ self.module.get_role(f_module, ome_conn_mock_ad)
+ assert err.value.args[0] == "Unable to complete the operation because the entered " \
+ "role name 'Administrator' does not exist."
+
+ f_module = self.get_module_mock(params={"state": "present", "group_name": "Administrator"})
+ ome_response_mock.json_data = {"value": [{"Name": "ADMIN", "Id": 10}]}
+ with pytest.raises(Exception) as err:
+ self.module.get_role(f_module, ome_conn_mock_ad)
+ assert err.value.args[0] == "missing required arguments: role"
+
+ def test_search_directory(self, ome_conn_mock_ad, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"state": "present", "group_name": "Administrator",
+ "domain_username": "admin@dev0", "domain_password": "password"})
+ ome_response_mock.json_data = [{"CommonName": "Administrator", "ObjectGuid": "object_id"}]
+ obj_id, name = self.module.search_directory(f_module, ome_conn_mock_ad, 16011)
+ assert obj_id == "object_id"
+
+ f_module = self.get_module_mock(params={"state": "present", "group_name": "Admin",
+ "domain_username": "admin@dev0", "domain_password": "password"})
+ with pytest.raises(Exception) as err:
+ self.module.search_directory(f_module, ome_conn_mock_ad, 16011)
+ assert err.value.args[0] == "Unable to complete the operation because the entered " \
+ "group name 'Admin' does not exist."
+
+ def test_get_directory(self, ome_conn_mock_ad, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as err:
+ self.module.get_directory(f_module, ome_conn_mock_ad)
+ assert err.value.args[0] == "missing required arguments: directory_name or directory_id"
+
+ f_module = self.get_module_mock(params={"directory_name": "test_directory"})
+ ome_response_mock.json_data = {'value': [{"Name": "test_directory", "Id": 1}]}
+ result = self.module.get_directory(f_module, ome_conn_mock_ad)
+ assert result == 1
+
+ f_module = self.get_module_mock(params={"directory_id": 2})
+ ome_response_mock.json_data = {'value': [{"Name": "test_directory", "Id": 2}]}
+ result = self.module.get_directory(f_module, ome_conn_mock_ad)
+ assert result == 2
+
+ f_module = self.get_module_mock(params={"directory_id": 3})
+ with pytest.raises(Exception) as err:
+ self.module.get_directory(f_module, ome_conn_mock_ad)
+ assert err.value.args[0] == "Unable to complete the operation because the entered " \
+ "directory id '3' does not exist."
+
+ def test_directory_user(self, ome_conn_mock_ad, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"group_name": "Administrator", "role": "administrator"})
+ mocker.patch(MODULE_PATH + "get_directory_user", return_value={"UserName": "Administrator", "Id": 15011,
+ "RoleId": "10", "Enabled": True})
+ mocker.patch(MODULE_PATH + "get_role", return_value=16)
+ mocker.patch(MODULE_PATH + "get_directory", return_value=10612)
+ mocker.patch(MODULE_PATH + "search_directory", return_value=("obj_gui_id", "administrator"))
+ ome_response_mock.json_data = [{"Name": "Account Operators", "Id": "16617", "ObjectGuid": "a491859c"}]
+ resp, msg = self.module.directory_user(f_module, ome_conn_mock_ad)
+ assert msg == 'updated'
+
+ f_module.check_mode = True
+ with pytest.raises(Exception) as err:
+ self.module.directory_user(f_module, ome_conn_mock_ad)
+ assert err.value.args[0] == "Changes found to be applied."
+
+ mocker.patch(MODULE_PATH + "get_directory_user", return_value={"UserName": "Administrator", "Id": 15011,
+ "RoleId": "16", "Enabled": True})
+ with pytest.raises(Exception) as err:
+ self.module.directory_user(f_module, ome_conn_mock_ad)
+ assert err.value.args[0] == "No changes found to be applied."
+
+ f_module.check_mode = False
+ mocker.patch(MODULE_PATH + "get_directory_user", return_value={"UserName": "Administrator", "Id": 15011,
+ "RoleId": "16", "Enabled": True})
+ with pytest.raises(Exception) as err:
+ self.module.directory_user(f_module, ome_conn_mock_ad)
+ assert err.value.args[0] == NO_CHANGES_MSG
+
+ mocker.patch(MODULE_PATH + "get_directory_user", return_value=None)
+ f_module.check_mode = True
+ with pytest.raises(Exception) as err:
+ self.module.directory_user(f_module, ome_conn_mock_ad)
+ assert err.value.args[0] == "Changes found to be applied."
+
+ f_module.check_mode = False
+ resp, msg = self.module.directory_user(f_module, ome_conn_mock_ad)
+ assert msg == "imported"
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_domain_exception(self, exc_type, mocker, ome_default_args,
+ ome_conn_mock_ad, ome_response_mock):
+ ome_default_args.update({"state": "absent"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'get_directory_user', side_effect=exc_type("url open error"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["failed"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'get_directory_user', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'get_directory_user',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware.py
new file mode 100644
index 00000000..082b8293
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware.py
@@ -0,0 +1,554 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+from mock import patch, mock_open
+
+import pytest
+import json
+import sys
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_firmware
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+NO_CHANGES_MSG = "No changes found to be applied. Either there are no updates present or components specified are not" \
+ " found in the baseline."
+COMPLIANCE_READ_FAIL = "Failed to read compliance report."
+APPLICABLE_DUP = "Unable to get applicable components DUP."
+
+device_resource = {"device_path": "DeviceService/Devices"}
+
+
+@pytest.fixture
+def ome_connection_firmware_mock(mocker, ome_response_mock):
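+    # MODULE_PATH for this module stops at 'plugins.modules.', so the patch target names ome_firmware explicitly.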
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_firmware.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeFirmware(FakeAnsibleModule):
+ module = ome_firmware
+
+ @pytest.fixture
+ def get_dup_file_mock(self):
+ m = mock_open()
+        m.return_value.readlines.return_value = ['this is line 1\n']
+        return m
+
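+    # Canned firmware-update job payload reused to parametrize the spawn-update-job and applicable-components tests.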
+ payload = {
+ "Builtin": False,
+ "CreatedBy": "admin",
+ "Editable": True,
+ "EndTime": None,
+ "Id": 29099,
+ "JobDescription": "Firmware Update Task",
+ "JobName": "Firmware Update Task",
+ "JobStatus": {
+ "Id": 2080,
+ "Name": "New"
+ },
+ "JobType": {
+ "Id": 5,
+ "Internal": False,
+ "Name": "Update_Task"
+ },
+ "LastRun": None,
+ "LastRunStatus": {
+ "Id": 2200,
+ "Name": "NotRun"
+ },
+ "NextRun": None,
+ "Params": [
+ {
+ "JobId": 29099,
+ "Key": "operationName",
+ "Value": "INSTALL_FIRMWARE"
+ },
+ {
+ "JobId": 29099,
+ "Key": "complianceUpdate",
+ "Value": "false"
+ },
+ {
+ "JobId": 29099,
+ "Key": "stagingValue",
+ "Value": "false"
+ },
+ {
+ "JobId": 29099,
+ "Key": "signVerify",
+ "Value": "true"
+ }
+ ],
+ "Schedule": "startnow",
+ "StartTime": None,
+ "State": "Enabled",
+ "Targets": [
+ {
+ "Data": "DCIM:INSTALLED#741__BIOS.Setup.1-1=1577776981156",
+ "Id": 28628,
+ "JobId": 29099,
+ "TargetType": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ],
+ "UpdatedBy": None,
+ "Visible": True
+ }
+
+ @pytest.mark.parametrize("param", [payload])
+ def test_spawn_update_job_case(self, param, ome_response_mock,
+ ome_connection_firmware_mock):
+ ome_response_mock.status_code = 201
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"Builtin": False,
+ "CreatedBy": "admin",
+ "Editable": True,
+ "EndTime": None,
+ "Id": 29099,
+ "JobDescription": "Firmware Update Task",
+ "JobName": "Firmware Update Task",
+ "JobStatus": {"Id": 2080,
+ "Name": "New"},
+ "JobType": {"Id": 5,
+ "Internal": False,
+ "Name": "Update_Task"},
+ "LastRun": None,
+ "LastRunStatus": {"Id": 2200,
+ "Name": "NotRun"},
+ "NextRun": None,
+ "Params": [{"JobId": 29099,
+ "Key": "operationName",
+ "Value": "INSTALL_FIRMWARE"},
+ {"JobId": 29099,
+ "Key": "complianceUpdate",
+ "Value": "false"},
+ {"JobId": 29099,
+ "Key": "stagingValue",
+ "Value": "false"},
+ {"JobId": 29099,
+ "Key": "signVerify",
+ "Value": "true"}],
+
+ "Schedule": "startnow",
+ "StartTime": None,
+ "State": "Enabled",
+ "Targets": [{"Data": "DCIM:INSTALLED#741__BIOS.Setup.1-1=1577776981156",
+ "Id": 28628,
+ "JobId": 29099,
+ "TargetType": {"Id": 1000,
+ "Name": "DEVICE"}}],
+ "UpdatedBy": None,
+ "Visible": True}
+ result = self.module.spawn_update_job(ome_connection_firmware_mock, param)
+ assert result == param
+
+ payload1 = {
+ "Id": 0, "JobName": "Firmware Update Task",
+ "JobDescription": "Firmware Update Task", "Schedule": "startnow",
+ "State": "Enabled", "CreatedBy": "admin",
+ "JobType": {"Id": 5, "Name": "Update_Task"},
+ "Targets": [{
+ "Data": "DCIM:INSTALLED#741__BIOS.Setup.1-1=1577786112600",
+ "Id": 28628,
+ "TargetType": {
+ "Id": 1000,
+ "Name": "SERVER"
+ }
+ }],
+ "Params": [{"JobId": 0, "Key": "operationName", "Value": "INSTALL_FIRMWARE"},
+ {"JobId": 0, "Key": "complianceUpdate", "Value": "false"},
+ {"JobId": 0, "Key": "stagingValue", "Value": "false"},
+ {"JobId": 0, "Key": "signVerify", "Value": "true"}]
+ }
+ target_data = [
+ {
+ "Data": "DCIM:INSTALLED#741__BIOS.Setup.1-1=1577786112600",
+ "Id": 28628,
+ "TargetType": {
+ "Id": 1000,
+ "Name": "SERVER"
+ }
+ }
+ ]
+
+ @pytest.mark.parametrize("param", [{"inp": target_data, "out": payload1}])
+ def _test_job_payload_for_update_success_case(self,
+ ome_connection_firmware_mock, param):
+ f_module = self.get_module_mock()
+ payload = self.module.job_payload_for_update(f_module,
+ ome_connection_firmware_mock, param["inp"])
+ assert payload == param["out"]
+
+ dupdata = [{"DeviceId": 1674, "DeviceReport": {"DeviceTypeId": "1000", "DeviceTypeName": "SERVER"}},
+ {"DeviceId": 1662, "DeviceReport": {"DeviceTypeId": "1000", "DeviceTypeName": "SERVER"}}]
+
+ filepayload1 = {'SingleUpdateReportBaseline': [],
+ 'SingleUpdateReportGroup': [],
+ 'SingleUpdateReportFileToken': 1577786112600,
+ 'SingleUpdateReportTargets': [1674, 2222, 3333]}
+
+ @pytest.mark.parametrize("param", [{"inp": filepayload1, "outp": target_data}])
+ def test_get_applicable_components_success_case(self, param, ome_default_args, ome_response_mock,
+ ome_connection_firmware_mock):
+ ome_response_mock.json_data = [
+ {
+ "DeviceId": 28628,
+ "DeviceReport": {
+ "Components": [
+ {
+ "ComponentCriticality": "Recommended",
+ "ComponentCurrentVersion": "2.4.7",
+ "ComponentName": "PowerEdge BIOS",
+ "ComponentRebootRequired": "true",
+ "ComponentSourceName": "DCIM:INSTALLED#741__BIOS.Setup.1-1",
+ "ComponentTargetIdentifier": "159",
+ "ComponentUniqueIdentifier": "72400448-3a22-4da9-bd19-27a0e2082962",
+ "ComponentUpdateAction": "EQUAL",
+ "ComponentUriInformation": None,
+ "ComponentVersion": "2.4.7",
+ "ImpactAssessment": "",
+ "IsCompliant": "OK",
+ "PrerequisiteInfo": ""
+ }
+ ],
+ "DeviceIPAddress": "192.168.0.3",
+ "DeviceId": "28628",
+ "DeviceModel": "PowerEdge R940",
+ "DeviceName": "192.168.0.3",
+ "DeviceServiceTag": "HC2XFL2",
+ "DeviceTypeId": "1000",
+ "DeviceTypeName": "SERVER"
+ }
+ }
+ ]
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 200
+ f_module = self.get_module_mock()
+ result = self.module.get_applicable_components(ome_connection_firmware_mock, param["inp"], f_module)
+ assert result == param["outp"]
+
+ @pytest.mark.parametrize("param", [payload])
+ def test_get_applicable_components_failed_case(self, param, ome_default_args, ome_response_mock):
+ ome_response_mock.json_data = {
+ "value": [{"DeviceReport": {"DeviceTypeId": "1000", "DeviceTypeName": "SERVER"}, "DeviceId": "Id"}]}
+ ome_response_mock.status_code = 500
+ ome_response_mock.success = False
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module.get_applicable_components(ome_response_mock, param, f_module)
+ assert exc.value.args[0] == APPLICABLE_DUP
+
+ filepayload = {'SingleUpdateReportBaseline': [],
+ 'SingleUpdateReportGroup': [],
+ 'SingleUpdateReportTargets': [],
+ 'SingleUpdateReportFileToken': '1577786112600'}
+
+ outpayload = {'SingleUpdateReportBaseline': [],
+ 'SingleUpdateReportGroup': [],
+ 'SingleUpdateReportTargets': [],
+ 'SingleUpdateReportFileToken': '1577786112600'}
+
+ @pytest.mark.parametrize(
+ "duppayload",
+ [
+ {'file_token': '1577786112600', 'device_ids': None, 'group_ids': None, 'baseline_ids': None,
+ "out": outpayload},
+ {'file_token': '1577786112600', 'device_ids': [123], 'group_ids': None, 'baseline_ids': None,
+ "out": {'SingleUpdateReportBaseline': [],
+ 'SingleUpdateReportGroup': [],
+ 'SingleUpdateReportTargets': [123],
+ 'SingleUpdateReportFileToken': '1577786112600'}},
+ {'file_token': '1577786112600', 'device_ids': None, 'group_ids': [123], 'baseline_ids': None,
+ "out": {'SingleUpdateReportBaseline': [],
+ 'SingleUpdateReportGroup': [123],
+ 'SingleUpdateReportTargets': [],
+ 'SingleUpdateReportFileToken': '1577786112600'}},
+ {'file_token': '1577786112600', 'device_ids': None, 'group_ids': None, 'baseline_ids': [123],
+ "out": {'SingleUpdateReportBaseline': [123],
+ 'SingleUpdateReportGroup': [],
+ 'SingleUpdateReportTargets': [],
+ 'SingleUpdateReportFileToken': '1577786112600'}}])
+ def test_get_dup_applicability_payload_success_case(self, duppayload):
+ data = self.module.get_dup_applicability_payload(
+ duppayload.get('file_token'),
+ duppayload.get('device_ids'), duppayload.get('group_ids'), duppayload.get('baseline_ids'))
+ assert data == duppayload["out"]
+
+ def test_upload_dup_file_success_case01(self, ome_connection_firmware_mock, ome_response_mock):
+ ome_response_mock.json_data = "1577786112600"
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 200
+ f_module = self.get_module_mock(params={'dup_file': "/root1/Ansible_EXE/BIOS_87V69_WN64_2.4.7.EXE"})
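+        # Select the builtins module name for the running interpreter so open() can be patched without reading a real DUP file.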
+ if sys.version_info.major == 3:
+ builtin_module_name = 'builtins'
+ else:
+ builtin_module_name = '__builtin__'
+ with patch("{0}.open".format(builtin_module_name), mock_open(read_data="data")) as mock_file:
+ result = self.module.upload_dup_file(ome_connection_firmware_mock, f_module)
+ assert result == (True, "1577786112600")
+
+ def test_upload_dup_file_failure_case02(self, ome_default_args,
+ ome_connection_firmware_mock, ome_response_mock):
+ ome_response_mock.json_data = {"value": [{"Id": [1111, 2222, 3333], "DeviceServiceTag": "KLBR222",
+ "dup_file": "/root/Ansible_EXE/BIOS_87V69_WN64_2.4.7.EXE"}]}
+ ome_response_mock.status_code = 500
+
+ if sys.version_info.major == 3:
+ builtin_module_name = 'builtins'
+ else:
+ builtin_module_name = '__builtin__'
+ f_module = self.get_module_mock(
+ params={'dup_file': "/root1/Ansible_EXE/BIOS_87V69_WN64_2.4.7.EXE", 'hostname': '192.168.0.1'})
+ with patch("{0}.open".format(builtin_module_name), mock_open(read_data="data")) as mock_file:
+ with pytest.raises(Exception) as exc:
+ self.module.upload_dup_file(ome_connection_firmware_mock, f_module)
+ assert exc.value.args[0] == "Unable to upload {0} to {1}".format('/root1/Ansible_EXE/BIOS_87V69_WN64_2.4.7.EXE',
+ '192.168.0.1')
+
+ def test_get_device_ids_success_case(self, ome_connection_firmware_mock, ome_response_mock, ome_default_args):
+ ome_default_args.update()
+ f_module = self.get_module_mock()
+ ome_connection_firmware_mock.get_all_report_details.return_value = {
+ "report_list": [{'Id': 1111, 'DeviceServiceTag': "ABC1111"},
+ {'Id': 2222, 'DeviceServiceTag': "ABC2222"},
+ {'Id': 3333, 'DeviceServiceTag': "ABC3333"},
+ {'Id': 4444, 'DeviceServiceTag': "ABC4444"}]}
+ data, id_tag_map = self.module.get_device_ids(ome_connection_firmware_mock, f_module, [1111, 2222, 3333, "ABC4444"])
+ assert data == ['1111', '2222', '3333', '4444']
+
+ def test_get_device_ids_failure_case01(self, ome_connection_firmware_mock, ome_response_mock):
+ ome_response_mock.json_data = {'value': [{'Id': 'DeviceServiceTag'}]}
+ ome_response_mock.success = False
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_ids(ome_connection_firmware_mock, f_module, [2222])
+ assert exc.value.args[0] == "Unable to complete the operation because the entered target device service" \
+ " tag(s) or device id(s) '{0}' are invalid.".format("2222")
+
+ def test__validate_device_attributes_success_case(self, ome_connection_firmware_mock, ome_response_mock,
+ ome_default_args):
+ ome_default_args.update({'device_service_tag': ['R9515PT'], 'device_id': [2222]})
+ ome_response_mock.status_code = 200
+ ome_response_mock.json_data = {'value': [{'device_service_tag': ['R9515PT'], 'device_id': [2222]}]}
+ ome_response_mock.success = True
+ f_module = self.get_module_mock(params={'device_service_tag': ['R9515PT'], 'device_id': [2222],
+ 'devices': [{'id': 1234}, {'service_tag': "ABCD123"}]})
+ data = self.module._validate_device_attributes(f_module)
+ assert "R9515PT" in data
+
+ def test__validate_device_attributes_failed_case(self, ome_connection_firmware_mock, ome_response_mock):
+ ome_response_mock.json_data = {'value': [{'device_service_tag': None, 'device_id': None}]}
+ ome_response_mock.success = False
+ f_module = self.get_module_mock()
+ # with pytest.raises(Exception) as exc:
+ devlist = self.module._validate_device_attributes(f_module)
+ assert devlist == []
+ # assert exc.value.args[0] == "Either device_id or device_service_tag or device_group_names" \
+ # " or baseline_names should be specified."
+
+ def test_get_group_ids_fail_case(self, ome_default_args, ome_response_mock, ome_connection_firmware_mock):
+ ome_default_args.update({'device_group_names': ["Servers"], "dup_file": ""})
+ ome_response_mock.json_data = [{"Id": 1024,
+ "Name": "Servers"}]
+ ome_response_mock.success = False
+ data = self._run_module_with_fail_json(ome_default_args)
+ assert data["msg"] == "Unable to complete the operation because the entered target device group name(s)" \
+ " '{0}' are invalid.".format(",".join(set(["Servers"])))
+
+ def test_get_device_component_map(self, ome_connection_firmware_mock, ome_response_mock,
+ ome_default_args, mocker):
+ mocker.patch(MODULE_PATH + 'ome_firmware._validate_device_attributes',
+ return_value=['R9515PT', 2222, 1234, 'ABCD123'])
+ mocker.patch(MODULE_PATH + 'ome_firmware.get_device_ids',
+ return_value=([1234, 2222], {'1111': 'R9515PT', '1235': 'ABCD123'}))
+ output = {'1111': [], '1235': [], '2222': [], 1234: []}
+ f_module = self.get_module_mock(params={'device_service_tag': ['R9515PT'], 'device_id': [2222],
+ 'components': [],
+ 'devices': [{'id': 1234, 'components': []},
+ {'service_tag': "ABCD123", 'components': []}]})
+ data = self.module.get_device_component_map(ome_connection_firmware_mock, f_module)
+ assert 2222 in data
+
+ def test_main_firmware_success_case01(self, ome_default_args, mocker, ome_connection_firmware_mock):
+ ome_default_args.update({"device_id": Constants.device_id1, "device_service_tag": Constants.service_tag1,
+ "dup_file": ""})
+ mocker.patch(MODULE_PATH + 'ome_firmware._validate_device_attributes',
+ return_value=[Constants.device_id1, Constants.service_tag1])
+ mocker.patch(MODULE_PATH + 'ome_firmware.get_device_ids',
+ return_value=[Constants.device_id1, Constants.device_id2])
+ mocker.patch(MODULE_PATH + 'ome_firmware.upload_dup_file',
+ return_value=["SUCCESS", "token_id"])
+ mocker.patch(MODULE_PATH + 'ome_firmware.get_dup_applicability_payload',
+ return_value={"report_payload": "values"})
+ mocker.patch(MODULE_PATH + 'ome_firmware.get_applicable_components',
+ return_value="target_data")
+ mocker.patch(MODULE_PATH + 'ome_firmware.job_payload_for_update',
+ return_value={"job_payload": "values"})
+ mocker.patch(MODULE_PATH + 'ome_firmware.spawn_update_job',
+ return_value="Success")
+ data = self._run_module(ome_default_args)
+ assert data['changed'] is True
+ assert data['msg'] == "Successfully submitted the firmware update job."
+ assert data['update_status'] == "Success"
+
+ def test_main_firmware_success_case02(self, ome_default_args, mocker, ome_connection_firmware_mock):
+ ome_default_args.update({"baseline_name": "baseline_name"})
+ mocker.patch(MODULE_PATH + 'ome_firmware.validate_inputs')
+ mocker.patch(MODULE_PATH + 'ome_firmware.get_baseline_ids',
+ return_value=[1, 2])
+ mocker.patch(MODULE_PATH + 'ome_firmware.job_payload_for_update',
+ return_value={"job_payload": "values"})
+ mocker.patch(MODULE_PATH + 'ome_firmware.spawn_update_job',
+ return_value="Success")
+ mocker.patch(MODULE_PATH + 'ome_firmware.baseline_based_update',
+ return_value="target_data")
+ data = self._run_module(ome_default_args)
+ assert data['changed'] is True
+ assert data['msg'] == "Successfully submitted the firmware update job."
+ assert data['update_status'] == "Success"
+
+ def test_job_payload_for_update_case_01(self, ome_connection_firmware_mock):
+ """response None case"""
+ f_module = self.get_module_mock()
+ target_data = {}
+ ome_connection_firmware_mock.get_job_type_id.return_value = None
+ msg = "Unable to fetch the job type Id."
+ with pytest.raises(Exception, match=msg) as exc:
+ self.module.job_payload_for_update(ome_connection_firmware_mock, f_module, target_data)
+
+ def test_job_payload_for_update_case_02(self, ome_connection_firmware_mock, ome_response_mock):
+ """baseline case"""
+ f_module = self.get_module_mock(params={'schedule': 'RebootNow'})
+ target_data = {}
+ baseline = {"baseline_id": 1, "repo_id": 2, "catalog_id": 3}
+ ome_connection_firmware_mock.get_job_type_id.return_value = ome_response_mock
+ payload = self.module.job_payload_for_update(ome_connection_firmware_mock, f_module, target_data, baseline)
+ for item in payload["Params"]:
+ if item["Key"] == "complianceReportId":
+ assert item["Value"] == str(baseline["baseline_id"])
+ if item["Key"] == "repositoryId":
+ assert item["Value"] == str(baseline["repo_id"])
+ if item["Key"] == "catalogId":
+ assert item["Value"] == str(baseline["catalog_id"])
+
+ def test_job_payload_for_update_case_03(self, ome_connection_firmware_mock, ome_response_mock):
+ """response None case"""
+ f_module = self.get_module_mock(params={'schedule': 'RebootNow'})
+ target_data = {}
+ ome_connection_firmware_mock.get_job_type_id.return_value = ome_response_mock
+ payload = self.module.job_payload_for_update(ome_connection_firmware_mock, f_module, target_data)
+ for item in payload["Params"]:
+ if "JobId" in item:
+ assert item["JobId"] == 0
+ assert item["Key"] == "complianceUpdate"
+ assert item["Value"] == "false"
+
+ def test_get_baseline_ids_case01(self, ome_connection_firmware_mock, ome_response_mock):
+ response = {"report_list": [{"Name": "baseline_name", "Id": 1, "RepositoryId": 2, "CatalogId": 3}]}
+ ome_response_mock.json_data = response
+ ome_connection_firmware_mock.get_all_report_details.return_value = response
+ f_module = self.get_module_mock(params={'baseline_name': "baseline_name"})
+ baseline_detail = self.module.get_baseline_ids(ome_connection_firmware_mock, f_module)
+ assert baseline_detail["baseline_id"] == response["report_list"][0]["Id"]
+ assert baseline_detail["repo_id"] == response["report_list"][0]["RepositoryId"]
+ assert baseline_detail["catalog_id"] == response["report_list"][0]["CatalogId"]
+
+ def test_get_baseline_ids_case02(self, ome_connection_firmware_mock, ome_response_mock):
+ response = {"report_list": [{"Name": "baseline_name", "Id": 1, "RepositoryId": 2, "CatalogId": 3}]}
+ ome_response_mock.json_data = response
+ ome_connection_firmware_mock.get_all_report_details.return_value = response
+ f_module = self.get_module_mock(params={'baseline_name': "baseline_name2"})
+ with pytest.raises(Exception) as exc:
+ self.module.get_baseline_ids(ome_connection_firmware_mock, f_module)
+ assert exc.value.args[0] == "Unable to complete the operation because the entered target" \
+ " baseline name 'baseline_name2' is invalid."
+
+ def test_get_baseline_ids_case03(self, ome_connection_firmware_mock, ome_response_mock):
+ """Note: there is error in message format but UT message is updated as per module message"""
+ response = {"report_list": {}}
+ ome_response_mock.json_data = response
+ ome_connection_firmware_mock.get_all_report_details.return_value = response
+ f_module = self.get_module_mock(params={'baseline_name': "baseline_name2"})
+ with pytest.raises(Exception) as exc:
+ self.module.get_baseline_ids(ome_connection_firmware_mock, f_module)
+ assert exc.value.args[0] == "Unable to complete the operation because" \
+ " the entered target baseline name does not exist."
+
+ def test_baseline_based_update_exception_case_01(self, ome_connection_firmware_mock):
+ ome_connection_firmware_mock.get_all_report_details.return_value = {"report_list": []}
+ f_module = self.get_module_mock()
+ dev_comp_map = {}
+ with pytest.raises(Exception) as exc:
+ self.module.baseline_based_update(ome_connection_firmware_mock, f_module, {"baseline_id": 1}, dev_comp_map)
+ assert exc.value.args[0] == COMPLIANCE_READ_FAIL
+
+ def test_baseline_based_update_case_02(self, ome_connection_firmware_mock):
+ f_module = self.get_module_mock(params={'baseline_id': 1})
+ response = {"report_list": [
+ {"DeviceId": "1111", "DeviceTypeId": 2000, "DeviceName": "MX-111", "DeviceTypeName": "CHASSIS",
+ "ComponentComplianceReports": [{"UpdateAction": "UPGRADE", "SourceName": "SAS.xx.x2"}]}]}
+ ome_connection_firmware_mock.get_all_report_details.return_value = response
+ dev_comp_map = {}
+ compliance_report_list = self.module.baseline_based_update(ome_connection_firmware_mock, f_module,
+ {"baseline_id": 1}, dev_comp_map)
+ assert compliance_report_list == [
+ {'Id': "1111", 'Data': 'SAS.xx.x2', 'TargetType': {'Id': 2000, 'Name': 'CHASSIS'}}]
+
+ def test_baseline_based_update_case_03(self, ome_connection_firmware_mock):
+ f_module = self.get_module_mock(params={'baseline_id': 1})
+ response = {"report_list": [
+ {"DeviceId": 1111, "DeviceTypeId": 2000, "DeviceName": "MX-111", "DeviceTypeName": "CHASSIS",
+ "ComponentComplianceReports": []}]}
+ ome_connection_firmware_mock.get_all_report_details.return_value = response
+ dev_comp_map = {}
+ with pytest.raises(Exception, match=NO_CHANGES_MSG) as exc:
+ self.module.baseline_based_update(ome_connection_firmware_mock, f_module, {"baseline_id": 1}, dev_comp_map)
+
+ def test_validate_inputs(self):
+ f_module = self.get_module_mock(params={"dup_file": "/path/file.exe"})
+ msg = "Parameter 'dup_file' to be provided along with 'device_id'|'device_service_tag'|'device_group_names'"
+ with pytest.raises(Exception) as exc:
+ self.module.validate_inputs(f_module)
+ assert exc.value.args[0] == msg
+
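+    # Exception handling in ome_firmware main(): URLError surfaces as an unreachable
+    # result, HTTPError is raised with a JSON body wrapped in StringIO so the parsed
+    # details reach fail_json, and the remaining exception types simply fail the module.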
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLValidationError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_firmware_main_exception_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_firmware_mock, ome_response_mock):
+ ome_default_args.update(
+ {"device_id": Constants.device_id1, "device_service_tag": Constants.service_tag1, "dup_file": "duppath"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'ome_firmware._validate_device_attributes', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'ome_firmware._validate_device_attributes', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'ome_firmware._validate_device_attributes',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline.py
new file mode 100644
index 00000000..8af8d676
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline.py
@@ -0,0 +1,554 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.3.0
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+from ssl import SSLError
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_firmware_baseline
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
+BASELINE_JOB_RUNNING = "Firmware baseline '{name}' with ID {id} is running. Please retry after job completion."
+MULTI_BASELINES = "Multiple baselines present. Run the module again using a specific ID."
+BASELINE_DEL_SUCCESS = "Successfully deleted the firmware baseline."
+NO_CHANGES_MSG = "No changes found to be applied."
+INVALID_BASELINE_ID = "Invalid baseline ID provided."
+BASELINE_TRIGGERED = "Successfully triggered the firmware baseline task."
+NO_CATALOG_MESSAGE = "Catalog name not provided for baseline creation."
+NO_TARGETS_MESSAGE = "Targets not specified for baseline creation."
+CATALOG_STATUS_MESSAGE = "Unable to create the firmware baseline as the catalog is in {status} status."
+BASELINE_UPDATED = "Successfully {op} the firmware baseline."
+DISCOVER_JOB_COMPLETE = "Successfully completed the Discovery job."
+JOB_TRACK_SUCCESS = "Discovery job has {0}."
+JOB_TRACK_FAIL = "No devices discovered, job is in {0} state."
+SETTLING_TIME = 3
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline.'
+
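+# Expected payloads produced by _get_baseline_payload for the parametrized inputs used
+# later: payload_out1 covers fully-specified options, payload_out2 the defaults applied
+# when description/downgrade_enabled/is_64_bit are not supplied.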
+payload_out1 = {
+ "Name": "baseline1",
+ "Description": "baseline_description",
+ "CatalogId": 12,
+ "RepositoryId": 23,
+ "DowngradeEnabled": True,
+ "Is64Bit": True,
+ "Targets": [
+ {"Id": 123,
+ "Type": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }}]
+}
+payload_out2 = {
+ "Name": "baseline1",
+ "CatalogId": 12,
+ "RepositoryId": 23, 'Description': None, 'DowngradeEnabled': True, 'Is64Bit': True,
+ "Targets": [
+ {"Id": 123,
+ "Type": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }}]
+}
+
+baseline_status1 = {
+ "CatalogId": 123,
+ "Description": "BASELINE DESCRIPTION",
+ "DeviceComplianceReports": [],
+ "DowngradeEnabled": True,
+ "Id": 0,
+ "Is64Bit": True,
+ "Name": "my_baseline",
+ "RepositoryId": 123,
+ "RepositoryName": "catalog123",
+ "RepositoryType": "HTTP",
+ "Targets": [
+ {
+ "Id": 10083,
+ "Type": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ },
+ {
+ "Id": 10076,
+ "Type": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ],
+ "TaskId": 11235,
+ "TaskStatusId": 0
+}
+
+
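+# Connection fixture: RestOME is patched inside the ome_firmware_baseline module so the
+# object yielded by the context manager is a MagicMock whose invoke_request() returns
+# the shared ome_response_mock; tests shape json_data/success/status_code per case.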
+@pytest.fixture
+def ome_connection_mock_for_firmware_baseline(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeFirmwareBaseline(FakeAnsibleModule):
+ module = ome_firmware_baseline
+
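+    # Despite its name, this optional fixture patches _get_baseline_payload with a
+    # canned payload so tests can bypass payload construction when it is not under test.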
+ @pytest.fixture
+ def mock__get_catalog_payload(self, mocker):
+ mock_payload = mocker.patch(
+ MODULE_PATH + '_get_baseline_payload',
+ return_value={
+ "Name": "baseline_name",
+ "CatalogId": "cat_id",
+ "RepositoryId": "repo_id",
+ "Targets": {}
+ }
+ )
+ return mock_payload
+
+ catrepo_param1 = "catalog1"
+ catrepo_out1 = (22, 12)
+ catrepo_param2 = None
+ catrepo_out2 = (None, None)
+ catrepo_param3 = "catalog3"
+ catrepo_out3 = (None, None)
+
+ @pytest.mark.parametrize("params", [{"inp": catrepo_param1, "out": catrepo_out1},
+ {"inp": catrepo_param2, "out": catrepo_out2},
+ {"inp": catrepo_param3, "out": catrepo_out3}])
+ def test_get_catrepo_ids(self, ome_connection_mock_for_firmware_baseline,
+ ome_response_mock, params):
+ ome_connection_mock_for_firmware_baseline.get_all_items_with_pagination.return_value = {
+ "value": [
+ {
+ "Id": 22,
+ "Repository": {
+ "Id": 12,
+ "Name": "catalog1",
+ },
+ "Status": "Completed"
+ },
+ {
+ "Id": 23,
+ "Repository": {
+ "Id": 12,
+ "Name": "catalog2",
+ },
+ "Status": "Completed"
+ }
+ ]
+ }
+ f_module = self.get_module_mock(params=params["inp"])
+ catrepo = self.module.get_catrepo_ids(f_module, params["inp"], ome_connection_mock_for_firmware_baseline)
+ assert catrepo == params["out"]
+
+ @pytest.mark.parametrize("params", [{"mparams": {"state": "absent", "baseline_name": "my_baseline1"}, "res": [
+ {"Id": 12, "Name": "my_baseline1"}], "json_data": {
+ "value": [{"Id": 12, "Name": "my_baseline1"}]}, "success": True}, {
+ "mparams": {"state": "absent", "baseline_id": 12},
+ "res": [{"Id": 12, "Name": "my_baseline1"}],
+ "json_data": {"value": [{"Id": 11, "Name": "my_baseline2"},
+ {"Id": 12, "Name": "my_baseline1"}]}, "success": True}])
+ def test_check_existing_baseline(self, mocker, params, ome_connection_mock_for_firmware_baseline, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ ome_connection_mock_for_firmware_baseline.get_all_items_with_pagination.return_value = params['json_data']
+ f_module = self.get_module_mock(params=params["mparams"])
+ res = self.module.check_existing_baseline(f_module, ome_connection_mock_for_firmware_baseline)
+ assert res == params["res"]
+
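+    # create_baseline cases: job_wait with successful job tracking, job_wait with a
+    # failed tracking result, and schedule=RunLater without waiting; each case asserts
+    # the resulting message through pytest.raises.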
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"Name": 'd1'}, 'job_failed': False, 'job_message': BASELINE_UPDATED.format(op='created'),
+ 'mparams': {'catalog_name': 'c1', 'device_ids': 123, 'job_wait': True, 'job_wait_timeout': 1000}},
+ {"json_data": {"Name": 'd1'}, 'job_failed': True, 'job_message': JOB_TRACK_FAIL,
+ 'mparams': {'catalog_name': 'c1', 'device_ids': 123, 'job_wait': True, 'job_wait_timeout': 1000}},
+ {"json_data": {"Name": 'd1'}, 'job_failed': True, 'job_message': BASELINE_TRIGGERED,
+ 'mparams': {'catalog_name': 'c1', 'device_ids': 123, 'job_wait': False, 'schedule': 'RunLater',
+ 'job_wait_timeout': 1000}}])
+ def test_create_baseline(self, params, mocker, ome_connection_mock_for_firmware_baseline, ome_response_mock):
+ mocker.patch(MODULE_PATH + '_get_baseline_payload', return_value={})
+ mocker.patch(MODULE_PATH + 'check_existing_baseline', return_value=[{"Id": 123}])
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
+ ome_connection_mock_for_firmware_baseline.job_tracking.return_value = \
+ (params['job_failed'], params['job_message'])
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params['mparams'])
+ error_message = params["job_message"]
+ with pytest.raises(Exception) as err:
+ self.module.create_baseline(f_module, ome_connection_mock_for_firmware_baseline)
+ assert err.value.args[0] == error_message
+
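+    # modify_baseline cases: an existing baseline updated with new name, description and
+    # targets, a failed job-tracking result, and a RunLater schedule; get_catrepo_ids and
+    # get_target_list are mocked so only the modify flow itself is exercised.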
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"Name": 'd1', },
+ 'job_failed': False, 'job_message': BASELINE_UPDATED.format(op='modified'),
+ 'mparams': {"baseline_description": "new description", "baseline_name": "c4", "catalog_name": "baseline",
+ "device_service_tags": ["2H7HNX2", "2HB9NX2"], "downgrade_enabled": False, "is_64_bit": False,
+ "job_wait": True, "job_wait_timeout": 600, "new_baseline_name": "new name"},
+ "baseline_list": [{"CatalogId": 25, "Description": "", "DowngradeEnabled": True, "Id": 40, "Is64Bit": True,
+ "Name": "c4", "RepositoryId": 15,
+ "Targets": [{"Id": 13456, "Type": {"Id": 1000, "Name": "DEVICE"}},
+ {"Id": 13457, "Type": {"Id": 1000, "Name": "DEVICE"}}], "TaskId": 14465,
+ "TaskStatusId": 2010}],
+ "get_catrepo_ids": (12, 13), "get_target_list": [{"Id": 13456, "Type": {"Id": 1000, "Name": "DEVICE"}},
+ {"Id": 13457, "Type": {"Id": 1000, "Name": "DEVICE"}}]
+ },
+ {"json_data": {"Name": 'd1'}, 'job_failed': True, 'job_message': JOB_TRACK_FAIL,
+ 'mparams': {'catalog_name': 'c1', 'device_ids': 123, 'job_wait': True, 'job_wait_timeout': 1000},
+ "baseline_list": [{"Id": 12, "Name": "c1", "TaskStatusId": 2010, "TaskId": 12}], },
+ {"json_data": {"Name": 'd1'}, 'job_failed': True, 'job_message': BASELINE_TRIGGERED,
+ "baseline_list": [{"Id": 12, "Name": "c1", "TaskStatusId": 2010, "TaskId": 12}],
+ 'mparams': {'catalog_name': 'c1', 'device_ids': 123, 'job_wait': False, 'schedule': 'RunLater',
+ 'job_wait_timeout': 1000}}])
+ def test_modify_baseline(self, params, mocker, ome_connection_mock_for_firmware_baseline, ome_response_mock):
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
+ mocker.patch(MODULE_PATH + 'get_catrepo_ids', return_value=params.get('get_catrepo_ids', (12, 13)))
+ mocker.patch(MODULE_PATH + 'get_target_list', return_value=params.get('get_target_list', []))
+ ome_connection_mock_for_firmware_baseline.job_tracking.return_value = \
+ (params['job_failed'], params['job_message'])
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params['mparams'])
+ error_message = params["job_message"]
+ with pytest.raises(Exception) as err:
+ self.module.modify_baseline(f_module, ome_connection_mock_for_firmware_baseline, params['baseline_list'])
+ assert err.value.args[0] == error_message
+
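+    # delete_baseline cases: with TaskStatusId 2010 the deletion succeeds and
+    # BASELINE_DEL_SUCCESS is expected, while 2050 is treated as a still-running
+    # baseline job and BASELINE_JOB_RUNNING is raised instead.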
+ @pytest.mark.parametrize("params",
+ [{"mparams": {"state": "absent", "baseline_job_name": "my_baseline1"},
+ "baseline_list": [{"Id": 12, "Name": "my_baseline1", "TaskStatusId": 2010}],
+ "job_state_dict": {12: 2010}, "res": BASELINE_DEL_SUCCESS.format(n=1),
+ "json_data": 1, "success": True},
+ {"mparams": {"state": "absent", "baseline_job_name": "my_baseline1"},
+ "baseline_list": [{"Id": 12, "Name": "my_baseline1", "TaskStatusId": 2050, "TaskId": 12}],
+ "job_state_dict": {12: 2050},
+ "res": BASELINE_JOB_RUNNING.format(name='my_baseline1', id=12), "json_data": 1,
+ "success": True}])
+ def test_delete_baseline(self, mocker, params, ome_connection_mock_for_firmware_baseline, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params["mparams"])
+ error_message = params["res"]
+ with pytest.raises(Exception) as err:
+ self.module.delete_baseline(f_module, ome_connection_mock_for_firmware_baseline, params['baseline_list'])
+ assert err.value.args[0] == error_message
+
+    def test_get_catrepo_ids_no_catalog_case(self, ome_connection_mock_for_firmware_baseline,
+                                             ome_response_mock):
+ ome_response_mock.success = False
+ f_module = self.get_module_mock()
+ catrepo = self.module.get_catrepo_ids(f_module, "catalog1", ome_connection_mock_for_firmware_baseline)
+ assert catrepo == (None, None)
+
+ inp_param1 = {"device_service_tags": ["R840PT3", "R940PT3"]}
+ out1 = [
+ {
+ "Id": 12,
+ "Type": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ },
+ {
+ "Id": 23,
+ "Type": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ]
+ inp_param2 = {"device_service_tags": ["R840PT3"]}
+ out2 = [{
+ "Id": 12,
+ "Type": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }]
+
+ @pytest.mark.parametrize("params", [{"inp": inp_param1, "out": out1},
+ {"inp": inp_param2, "out": out2}])
+ def test_get_dev_ids(self, ome_connection_mock_for_firmware_baseline,
+ ome_response_mock, params):
+ f_module = self.get_module_mock(params=params["inp"])
+ ome_connection_mock_for_firmware_baseline.get_all_items_with_pagination.return_value = {
+ "value":
+ [
+ {
+ "Id": 12,
+ "Type": 1000,
+ "DeviceServiceTag": "R840PT3"
+ },
+ {
+ "Id": 23,
+ "Type": 1000,
+ "DeviceServiceTag": "R940PT3"
+ }
+ ]
+ }
+ targets = self.module.get_dev_ids(f_module, ome_connection_mock_for_firmware_baseline,
+ "device_service_tags", "DeviceServiceTag")
+ assert targets == params["out"]
+
+ grp_param1 = {"device_group_names": ["group1", "group2"]}
+ grp_out1 = [
+ {
+ "Id": 12,
+ "Type": {
+ "Id": 6000,
+ "Name": "GROUP"
+ }
+ },
+ {
+ "Id": 23,
+ "Type": {
+ "Id": 6000,
+ "Name": "GROUP"
+ }
+ }
+ ]
+ grp_param2 = {"device_group_names": ["group1"]}
+ grp_out2 = [
+ {
+ "Id": 12,
+ "Type": {
+ "Id": 6000,
+ "Name": "GROUP"
+ }
+ }
+ ]
+
+ @pytest.mark.parametrize("params", [{"inp": grp_param1, "out": grp_out1},
+ {"inp": grp_param2, "out": grp_out2}])
+ def test_get_group_ids(self, ome_connection_mock_for_firmware_baseline,
+ ome_response_mock, params):
+ f_module = self.get_module_mock(params=params["inp"])
+ ome_response_mock.success = True
+ ome_connection_mock_for_firmware_baseline.get_all_items_with_pagination.return_value = {
+ "value": [
+ {
+ "Id": 12,
+ "TypeId": 6000,
+ "Name": "group1"
+ },
+ {
+ "Id": 23,
+ "TypeId": 6000,
+ "Name": "group2"
+ }
+ ]
+ }
+ targets = self.module.get_group_ids(f_module, ome_connection_mock_for_firmware_baseline)
+ assert targets == params["out"]
+
+ payload_param1 = {"catalog_name": "cat1",
+ "baseline_name": "baseline1",
+ "baseline_description": "baseline_description",
+ "downgrade_enabled": True,
+ "is_64_bit": True}
+ payload_param2 = {"catalog_name": "cat1",
+ "baseline_name": "baseline1",
+ "baseline_description": None,
+ "downgrade_enabled": None,
+ "is_64_bit": None}
+
+ @pytest.mark.parametrize("params", [{"inp": payload_param1, "out": payload_out1},
+ {"inp": payload_param2, "out": payload_out2}])
+ def test__get_baseline_payload(self, ome_connection_mock_for_firmware_baseline, params, mocker):
+ f_module = self.get_module_mock(params=params["inp"])
+ mocker.patch(
+ MODULE_PATH + 'get_catrepo_ids',
+ return_value=(12, 23))
+ mocker.patch(
+ MODULE_PATH + 'get_target_list',
+ return_value=[{"Id": 123, "Type": {"Id": 1000, "Name": "DEVICE"}}])
+ payload = self.module._get_baseline_payload(f_module, ome_connection_mock_for_firmware_baseline)
+ assert payload == params["out"]
+
+ def test__get_baseline_payload_failure01(self, ome_default_args, ome_connection_mock_for_firmware_baseline, mocker):
+ f_module = self.get_module_mock(params={"catalog_name": "cat1",
+ "baseline_name": "baseline1"})
+ mocker.patch(
+ MODULE_PATH + 'get_catrepo_ids',
+ return_value=(None, None))
+ mocker.patch(
+ MODULE_PATH + 'get_target_list',
+ return_value=[{"Id": 123, "Type": {
+ "Id": 1000, "Name": "DEVICE"}}])
+ with pytest.raises(Exception) as exc:
+ self.module._get_baseline_payload(f_module, ome_connection_mock_for_firmware_baseline)
+ assert exc.value.args[0] == "No Catalog with name cat1 found"
+
+ def test__get_baseline_payload_failure02(self, ome_default_args, ome_connection_mock_for_firmware_baseline, mocker):
+ f_module = self.get_module_mock(params={"catalog_name": "cat1",
+ "baseline_name": "baseline1"})
+ mocker.patch(
+ MODULE_PATH + 'get_catrepo_ids',
+ return_value=(12, 23))
+ mocker.patch(
+ MODULE_PATH + 'get_target_list',
+ return_value=None)
+ with pytest.raises(Exception) as exc:
+ self.module._get_baseline_payload(f_module, ome_connection_mock_for_firmware_baseline)
+ assert exc.value.args[0] == NO_TARGETS_MESSAGE
+
+ target_param1 = {"device_ids": [12, 23]}
+ target_out1 = [
+ {
+ "Id": 12,
+ "Type": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ },
+ {
+ "Id": 23,
+ "Type": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ]
+ target_param2 = {"x": 3}
+ target_out2 = None
+
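+    # get_target_list is driven with service tags, group names, device ids and an
+    # unrelated parameter; get_dev_ids and get_group_ids are mocked to the expected
+    # output, so only the option dispatch (and the None fallthrough) is verified.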
+ @pytest.mark.parametrize("params", [{"inp": inp_param1, "out": out1},
+ {"inp": inp_param2, "out": out2},
+ {"inp": grp_param1, "out": grp_out1},
+ {"inp": grp_param2, "out": grp_out2},
+ {"inp": target_param1, "out": target_out1},
+ {"inp": target_param2, "out": target_out2}])
+ def test_get_target_list(self, ome_connection_mock_for_firmware_baseline, params, mocker):
+ f_module = self.get_module_mock(params=params["inp"])
+ mocker.patch(
+ MODULE_PATH + 'get_dev_ids',
+ return_value=params["out"])
+ mocker.patch(
+ MODULE_PATH + 'get_group_ids',
+ return_value=params["out"])
+ targets = self.module.get_target_list(f_module, ome_connection_mock_for_firmware_baseline)
+ assert targets == params["out"]
+
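+    # Idempotency and check_mode coverage for main(): state=absent with no matching
+    # baseline reports NO_CHANGES_MSG, while check_mode runs that would delete an
+    # existing baseline or create a new one report "Changes found to be applied.".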
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"JobId": 1234},
+ "check_existing_baseline": [],
+ "mparams": {"state": "absent", "baseline_name": "b1", "device_ids": [12, 23], 'catalog_name': 'c1',
+ 'job_wait': False},
+ 'message': NO_CHANGES_MSG, "success": True
+ },
+ {"json_data": {"JobId": 1234},
+ "check_existing_baseline": [{"name": "b1", "Id": 123, "TaskStatusId": 2060}], "check_mode": True,
+ "mparams": {"state": "absent", "baseline_id": 123, "device_ids": [12, 23], 'catalog_name': 'c1',
+ 'job_wait': False},
+ 'message': "Changes found to be applied.", "success": True
+ },
+ {"json_data": {"JobId": 1234},
+ "check_existing_baseline": [], "check_mode": True,
+ "mparams": {"state": "present", "baseline_name": "b1", "device_ids": [12, 23], 'catalog_name': 'c1',
+ 'job_wait': False},
+ 'message': "Changes found to be applied.", "success": True
+ }
+ ])
+ def test_main_success(self, params, ome_connection_mock_for_firmware_baseline, ome_default_args, ome_response_mock, mocker):
+ mocker.patch(MODULE_PATH + 'check_existing_baseline', return_value=params.get("check_existing_baseline"))
+ mocker.patch(MODULE_PATH + '_get_baseline_payload', return_value=params.get("_get_baseline_payload"))
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params.get("json_data")
+ ome_default_args.update(params.get('mparams'))
+ result = self._run_module(ome_default_args, check_mode=params.get("check_mode", False))
+ assert result["msg"] == params['message']
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"JobId": 1234},
+ "check_existing_baseline": [], "check_mode": True,
+ "mparams": {"state": "present", "baseline_id": 123, "device_ids": [12, 23], 'catalog_name': 'c1',
+ 'job_wait': False},
+ 'message': INVALID_BASELINE_ID, "success": True
+ },
+ {"json_data": {"JobId": 1234},
+ "check_existing_baseline": [{"Name": "b1", "Id": 123, "TaskStatusId": 2050, "TaskId": 2050}], "check_mode": True,
+ "mparams": {"state": "present", "baseline_id": 123, "device_ids": [12, 23], 'catalog_name': 'c1',
+ 'job_wait': False},
+ 'message': "Firmware baseline 'b1' with ID 123 is running. Please retry after job completion.", "success": True
+ },
+ {"json_data": {"JobId": 1234},
+ "check_existing_baseline": [{"Name": "b1", "Id": 123, "TaskStatusId": 2060, "TaskId": 2050}],
+ "check_mode": True, "get_catrepo_ids": (None, None),
+ "mparams": {"state": "present", "baseline_id": 123, "device_ids": [12, 23], 'catalog_name': 'c1',
+ 'job_wait': False},
+ 'message': "No Catalog with name c1 found", "success": True
+ },
+ ])
+ def test_main_failure(self, params, ome_connection_mock_for_firmware_baseline, ome_default_args, ome_response_mock, mocker):
+ mocker.patch(MODULE_PATH + 'check_existing_baseline', return_value=params.get("check_existing_baseline"))
+ mocker.patch(MODULE_PATH + '_get_baseline_payload', return_value=params.get("_get_baseline_payload"))
+ mocker.patch(MODULE_PATH + 'get_catrepo_ids', return_value=params.get("get_catrepo_ids"))
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params.get("json_data")
+ ome_default_args.update(params.get('mparams'))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == params['message']
+
+ def test_main_failure01(self, ome_connection_mock_for_firmware_baseline, ome_default_args, ome_response_mock,
+ mocker):
+ mocker.patch(
+ MODULE_PATH + '_get_baseline_payload',
+ return_value=payload_out1)
+ ome_response_mock.success = False
+ ome_response_mock.json_data = baseline_status1
+ ome_default_args.update({"baseline_name": "b1", "device_ids": [12, 23]})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["failed"] is True
+ assert 'msg' in result
+
+ def test_main_failure02(self, ome_connection_mock_for_firmware_baseline, ome_default_args, ome_response_mock,
+ mocker):
+ mocker.patch(
+ MODULE_PATH + '_get_baseline_payload',
+ return_value=payload_out1)
+ ome_response_mock.success = False
+ ome_response_mock.json_data = baseline_status1
+ ome_default_args.update({"baseline_name": "b1"})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["failed"] is True
+ assert 'msg' in result
+
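+    # Exception handling in main(): URLError is reported as unreachable, HTTPError is
+    # raised with a JSON body wrapped in StringIO so parsed error details reach
+    # fail_json, and the remaining exception types simply fail the module.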
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_baseline_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_firmware_baseline, ome_response_mock):
+ ome_default_args.update({"state": "absent", "baseline_name": "t1"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'check_existing_baseline', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'check_existing_baseline', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'check_existing_baseline',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py
new file mode 100644
index 00000000..96672f6d
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py
@@ -0,0 +1,537 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.1.0
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_firmware_baseline_compliance_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, \
+ AnsibleFailJSonException, Constants
+
+
+@pytest.fixture
+def ome_connection_mock_for_firmware_baseline_compliance_info(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeFirmwareBaselineComplianceInfo(FakeAnsibleModule):
+ module = ome_firmware_baseline_compliance_info
+
+ def test__get_device_id_from_service_tags_for_baseline_success_case(self, ome_response_mock,
+ ome_connection_mock_for_firmware_baseline_compliance_info):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_report_details.return_value = {
+ "report_list": [{"DeviceServiceTag": Constants.service_tag1, "Id": Constants.device_id1}]}
+ f_module = self.get_module_mock()
+ data = self.module._get_device_id_from_service_tags([Constants.service_tag1],
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ f_module)
+ assert data == {Constants.device_id1: Constants.service_tag1}
+
+ def test__get_device_id_from_service_tags_empty_case(self, ome_response_mock,
+ ome_connection_mock_for_firmware_baseline_compliance_info):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_report_details.return_value = {
+ "report_list": []}
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ data = self.module._get_device_id_from_service_tags([Constants.service_tag1],
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ f_module)
+ assert exc.value.args[0] == "Unable to fetch the device information."
+
+ def test_get_device_id_from_service_tags_for_baseline_error_case(self,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_report_details.side_effect = HTTPError(
+ 'http://testhost.com', 400, '', {}, None)
+ f_module = self.get_module_mock()
+ with pytest.raises(HTTPError) as ex:
+ self.module._get_device_id_from_service_tags(["INVALID"],
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ f_module)
+
+ def test_get_device_id_from_service_tags_for_baseline_value_error_case(self,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_report_details.return_value = {
+ "report_list": []}
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module._get_device_id_from_service_tags(["#$%^&"],
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ f_module)
+ assert exc.value.args[0] == "Unable to fetch the device information."
+
+ def test_get_device_ids_from_group_ids_success_case(self, ome_response_mock,
+ ome_connection_mock_for_firmware_baseline_compliance_info):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.return_value = {
+ "value": [{"DeviceServiceTag": Constants.service_tag1, "Id": Constants.device_id1}]}
+ f_module = self.get_module_mock()
+ device_ids = self.module.get_device_ids_from_group_ids(f_module, ["123", "345"],
+ ome_connection_mock_for_firmware_baseline_compliance_info)
+ assert device_ids == [Constants.device_id1, Constants.device_id1]
+
+ def test_get_device_ids_from_group_ids_empty_case(self, ome_response_mock,
+ ome_connection_mock_for_firmware_baseline_compliance_info):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_report_details.return_value = {"report_list": []}
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ device_ids = self.module.get_device_ids_from_group_ids(f_module, ["123", "345"],
+ ome_connection_mock_for_firmware_baseline_compliance_info)
+ assert exc.value.args[0] == "Unable to fetch the device ids from specified device_group_names."
+
+ def test_get_device_ids_from_group_ids_error_case(self, ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.side_effect = HTTPError(
+ 'http://testhost.com', 400, '', {}, None)
+ f_module = self.get_module_mock()
+ with pytest.raises(HTTPError) as ex:
+ device_ids = self.module.get_device_ids_from_group_ids(f_module, ["123456"],
+ ome_connection_mock_for_firmware_baseline_compliance_info)
+
+ def test_get_device_ids_from_group_ids_value_error_case(self,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.return_value = {
+ "value": []}
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_ids_from_group_ids(f_module, ["123456"],
+ ome_connection_mock_for_firmware_baseline_compliance_info)
+ assert exc.value.args[0] == "Unable to fetch the device ids from specified device_group_names."
+
+ def test_get_device_ids_from_group_names_success_case(self, mocker, ome_response_mock,
+ ome_connection_mock_for_firmware_baseline_compliance_info):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_report_details.return_value = {
+ "report_list": [{"Name": "group1", "Id": 123}]}
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_device_ids_from_group_ids',
+ return_value=[Constants.device_id1, Constants.device_id2])
+ f_module = self.get_module_mock(params={"device_group_names": ["group1", "group2"]})
+ device_ids = self.module.get_device_ids_from_group_names(f_module,
+ ome_connection_mock_for_firmware_baseline_compliance_info)
+ assert device_ids == [Constants.device_id1, Constants.device_id2]
+
+ def test_get_device_ids_from_group_names_empty_case(self, mocker, ome_response_mock,
+ ome_connection_mock_for_firmware_baseline_compliance_info):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_report_details.return_value = {
+ "report_list": []}
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_device_ids_from_group_ids',
+ return_value=[])
+ f_module = self.get_module_mock(params={"device_group_names": ["abc", "xyz"]})
+ with pytest.raises(Exception) as ex:
+ device_ids = self.module.get_device_ids_from_group_names(f_module,
+ ome_connection_mock_for_firmware_baseline_compliance_info)
+ assert ex.value.args[0] == "Unable to fetch the specified device_group_names."
+
+ def test_get_device_ids_from_group_names_error_case(self, ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_report_details.side_effect = HTTPError(
+ 'http://testhost.com', 400, '', {}, None)
+ f_module = self.get_module_mock(params={"device_group_names": ["abc", "xyz"]})
+ with pytest.raises(HTTPError) as ex:
+ self.module.get_device_ids_from_group_names(f_module,
+ ome_connection_mock_for_firmware_baseline_compliance_info)
+
+ def test_get_device_ids_from_group_names_value_error_case(self,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_report_details.return_value = {
+ "report_list": []}
+ f_module = self.get_module_mock(params={"device_group_names": ["abc", "xyz"]})
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_ids_from_group_names(f_module,
+ ome_connection_mock_for_firmware_baseline_compliance_info)
+ assert exc.value.args[0] == "Unable to fetch the specified device_group_names."
+
+ def test_get_identifiers_with_device_ids(self, ome_connection_mock_for_firmware_baseline_compliance_info,
+ module_mock, default_ome_args):
+ """when device_ids given """
+ f_module = self.get_module_mock(params={"device_ids": [Constants.device_id1, Constants.device_id2]})
+ identifiers, identifiers_type = self.module.get_identifiers(
+ ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
+ assert identifiers == [Constants.device_id1, Constants.device_id2]
+ assert identifiers_type == "device_ids"
+
+ def test_get_identifiers_with_service_tags(self, mocker, ome_connection_mock_for_firmware_baseline_compliance_info,
+ module_mock, default_ome_args):
+ """when service tags given """
+ f_module = self.get_module_mock(params={"device_service_tags": [Constants.service_tag1]})
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info._get_device_id_from_service_tags',
+ return_value={Constants.device_id1: Constants.service_tag1})
+ identifiers, identifiers_type = self.module.get_identifiers(
+ ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
+ assert identifiers == [Constants.device_id1]
+ assert identifiers_type == "device_service_tags"
+
+ def test_get_identifiers_with_group_names(self, mocker, ome_connection_mock_for_firmware_baseline_compliance_info,
+ module_mock, default_ome_args):
+ """when service tags given """
+ f_module = self.get_module_mock(params={"device_group_names": [Constants.service_tag1]})
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_device_ids_from_group_names',
+ return_value=[123, 456])
+ identifiers, identifiers_type = self.module.get_identifiers(
+ ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
+ assert identifiers == [123, 456]
+        assert identifiers_type == "device_group_names"
+
+ def test_get_identifiers_with_service_tags_empty_case(self, mocker,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ module_mock, default_ome_args):
+ """when service tags given """
+ f_module = self.get_module_mock(params={"device_service_tags": [Constants.service_tag1]})
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info._get_device_id_from_service_tags',
+ return_value={})
+ identifiers, identifiers_type = self.module.get_identifiers(
+ ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
+ assert identifiers == []
+ assert identifiers_type == "device_service_tags"
+
+ def test_get_baseline_id_from_name_success_case(self, default_ome_args,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ module_mock, ome_response_mock):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.return_value = {
+ "value": [{"Name": "baseline_name1", "Id": 111}, {"Name": "baseline_name2",
+ "Id": 222}]}
+ f_module = self.get_module_mock(params={"baseline_name": "baseline_name1"})
+ baseline_id = self.module.get_baseline_id_from_name(ome_connection_mock_for_firmware_baseline_compliance_info,
+ f_module)
+ assert baseline_id == 111
+
+ def test_get_baseline_id_from_name_when_name_not_exists(self, default_ome_args,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.return_value = {
+ "value": [{"Name": "baseline_name1", "Id": 111}]}
+ f_module = self.get_module_mock(params={"baseline_name": "not_exits"})
+ with pytest.raises(AnsibleFailJSonException) as exc:
+ self.module.get_baseline_id_from_name(ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
+ assert exc.value.args[0] == "Specified baseline_name does not exist in the system."
+
+ def test_get_baseline_id_from_name_when_baseline_is_empty(self, default_ome_args,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.return_value = {
+ "value": []}
+ f_module = self.get_module_mock(params={"baseline_name": "baseline_name1"})
+ with pytest.raises(AnsibleFailJSonException) as exc:
+ self.module.get_baseline_id_from_name(ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
+ assert exc.value.args[0] == "No baseline exists in the system."
+
+ def test_get_baseline_id_from_name_when_baselinename_is_none(self, default_ome_args,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.return_value = {
+ "value": []}
+ f_module = self.get_module_mock(params={"baseline_notexist": "data"})
+ with pytest.raises(AnsibleFailJSonException) as exc:
+ self.module.get_baseline_id_from_name(ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
+ assert exc.value.args[0] == "baseline_name is a mandatory option."
+
+ def test_get_baseline_id_from_name_with_http_error_handlin_case(self,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.side_effect = HTTPError(
+ 'http://testhost.com', 400, '', {}, None)
+ f_module = self.get_module_mock(params={"baseline_name": "baseline_name1"})
+ with pytest.raises(HTTPError) as ex:
+ self.module.get_baseline_id_from_name(ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
+
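+    # get_baseline_id_from_name lets transport and parsing errors from
+    # get_all_items_with_pagination propagate unchanged; HTTPError/SSLValidationError
+    # are built with (url, code, msg, hdrs, fp) arguments, the rest from a plain message.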
+ @pytest.mark.parametrize("exc_type",
+ [URLError, SSLValidationError, ConnectionError, TypeError, ValueError, HTTPError])
+ def test_get_baseline_id_from_name_failure_case_01(self, exc_type,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ if exc_type not in [HTTPError, SSLValidationError]:
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.side_effect = exc_type(
+ 'test')
+ else:
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.side_effect = exc_type(
+ 'http://testhost.com', 400, '', {}, None)
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ f_module = self.get_module_mock(params={"baseline_name": "baseline_name1"})
+ with pytest.raises(exc_type) as ex:
+ self.module.get_baseline_id_from_name(ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
+
+ def test_get_baselines_report_by_device_ids_success_case(self, mocker,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_identifiers',
+ return_value=([Constants.device_id1], "device_ids"))
+ ome_response_mock.json_data = {"value": []}
+ ome_response_mock.success = True
+ f_module = self.get_module_mock()
+ self.module.get_baselines_report_by_device_ids(ome_connection_mock_for_firmware_baseline_compliance_info,
+ f_module)
+
+ def test_get_baselines_report_by_device_service_tag_not_exits_case(self, mocker,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_identifiers',
+ return_value=([], "device_service_tags"))
+ ome_response_mock.json_data = {"value": []}
+ ome_response_mock.success = True
+ f_module = self.get_module_mock()
+ with pytest.raises(AnsibleFailJSonException) as exc:
+ self.module.get_baselines_report_by_device_ids(ome_connection_mock_for_firmware_baseline_compliance_info,
+ f_module)
+ assert exc.value.args[0] == "Device details not available as the service tag(s) provided are invalid."
+
+ def test_get_baselines_report_by_group_names_not_exits_case(self, mocker,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_identifiers',
+ return_value=([], "device_group_names"))
+ ome_response_mock.json_data = {"value": []}
+ ome_response_mock.success = True
+ f_module = self.get_module_mock()
+ with pytest.raises(AnsibleFailJSonException) as exc:
+ self.module.get_baselines_report_by_device_ids(ome_connection_mock_for_firmware_baseline_compliance_info,
+ f_module)
+ assert exc.value.args[0] == "Device details not available as the group name(s) provided are invalid."
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def _test_get_baselines_report_by_device_ids_exception_handling(self, mocker, exc_type,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ """when invalid value for expose_durationis given """
+ err_dict = {"file": {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "MessageId": "CUPD3090",
+ "RelatedProperties": [],
+ "Message": "Unable to retrieve baseline list either because the device "
+ "ID(s) entered are invalid, the ID(s) provided are not "
+ "associated with a baseline or a group is used as a target for "
+ "a baseline.",
+ "MessageArgs": [],
+ "Severity": "Critical",
+ "Resolution": "Make sure the entered device ID(s) are valid and retry the operation."
+ }
+ ]
+ }
+ }
+ }
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_identifiers',
+ return_value=([], "device_ids"))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ ome_connection_mock_for_firmware_baseline_compliance_info.invoke_request.side_effect = exc_type('test')
+ else:
+ ome_connection_mock_for_firmware_baseline_compliance_info.invoke_request.side_effect = exc_type(
+ 'http://testhost.com', 400, '', err_dict, None)
+ f_module = self.get_module_mock()
+ with pytest.raises(exc_type):
+ self.module.get_baselines_report_by_device_ids(
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ f_module)
+
+ def test_get_baseline_compliance_reports_success_case_for_baseline_device(self, mocker, ome_response_mock,
+ ome_connection_mock_for_firmware_baseline_compliance_info):
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baseline_id_from_name',
+ return_value=123)
+ f_module = self.get_module_mock(params={"baseline_name": "baseline1"})
+ ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.return_value = {
+ "value": [{"baseline_device_report1": "data"}]}
+ data = self.module.get_baseline_compliance_reports(ome_connection_mock_for_firmware_baseline_compliance_info,
+ f_module)
+ assert data == [{"baseline_device_report1": "data"}]
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_get_baseline_compliance_reports_exception_handling_case(self, exc_type, mocker, ome_response_mock,
+ ome_connection_mock_for_firmware_baseline_compliance_info):
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baseline_id_from_name',
+ side_effect=exc_type('exception message'))
+ else:
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baseline_id_from_name',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ f_module = self.get_module_mock(params={"baseline_name": "baseline1"})
+ with pytest.raises(exc_type):
+ self.module.get_baseline_compliance_reports(ome_connection_mock_for_firmware_baseline_compliance_info,
+ f_module)
+
+ param_list1 = [{"baseline_name": ""},
+ {"baseline_name": None},
+ {"device_ids": []},
+ {"device_ids": None},
+ {"device_ids": [], "baseline_name": ""},
+ {"device_service_tags": []},
+ {"device_service_tags": [], "baseline_name": ""},
+ {"device_service_tags": None},
+ {"device_group_names": [], "baseline_name": ""},
+ {"device_group_names": []},
+ {"device_group_names": None},
+ {"device_ids": [], "device_service_tags": []},
+ {"device_ids": None, "device_service_tags": None},
+ {"device_ids": [], "device_service_tags": [], "device_group_names": []},
+ {"device_ids": None, "device_service_tags": None, "device_group_names": None},
+ {"device_ids": None, "device_service_tags": [], "device_group_names": None},
+ {"device_ids": [], "device_service_tags": [], "device_group_names": [], "baseline_name": ""},
+
+ ]
+
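+    # Every combination in param_list1 leaves all identifier options empty or None, so
+    # validate_inputs must fail with the "one of the following is required" message.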
+ @pytest.mark.parametrize("param", param_list1)
+ def test_validate_input_error_handling_case(self, param):
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as exc:
+ self.module.validate_inputs(f_module)
+ assert exc.value.args[0] == "one of the following is required: device_ids, " \
+ "device_service_tags, device_group_names, baseline_name " \
+ "to generate device based compliance report."
+
+ params_list2 = [{
+ "device_ids": [Constants.device_id1],
+ "device_service_tags": [Constants.service_tag1]},
+ {"device_ids": [Constants.device_id1]},
+ {"device_group_names": ["group1"]},
+ {"device_service_tags": [Constants.service_tag1]},
+ {"baseline_name": "baseline1", "device_ids": [Constants.device_id1]},
+ {"baseline_name": "baseline1", "device_group_names": ["group1"]}
+ ]
+
+ @pytest.mark.parametrize("param", params_list2)
+ def test_validate_input_params_without_error_handling_case(self, param):
+ f_module = self.get_module_mock(params=param)
+ self.module.validate_inputs(f_module)
+
+    def test_baseline_compliance_main_success_case_01(self, mocker, ome_default_args, module_mock,
+ ome_connection_mock_for_firmware_baseline_compliance_info):
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.validate_inputs')
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baselines_report_by_device_ids',
+ return_value=[{"device": "device_report"}])
+ ome_default_args.update({"device_ids": [Constants.device_id1]})
+ result = self._run_module(ome_default_args)
+ assert result["changed"] is False
+ assert 'baseline_compliance_info' in result
+ assert 'msg' not in result
+
+    def test_baseline_compliance_main_success_case_02(self, mocker, ome_default_args, module_mock,
+ ome_connection_mock_for_firmware_baseline_compliance_info):
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.validate_inputs')
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baseline_compliance_reports',
+ return_value=[{"baseline_device": "baseline_device_report"}])
+ ome_default_args.update({"baseline_name": "baseline_name"})
+ result = self._run_module(ome_default_args)
+ assert result["changed"] is False
+ assert 'baseline_compliance_info' in result
+ assert 'msg' not in result
+
+    def test_baseline_compliance_main_failure_case_01(self, ome_default_args, module_mock):
+ """required parameter is not passed along with specified report_type"""
+ # ome_default_args.update({})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert 'baseline_compliance_info' not in result
+ assert 'msg' in result
+ assert result['msg'] == "one of the following is required: device_ids, " \
+ "device_service_tags, device_group_names, baseline_name"
+ assert result['failed'] is True
+
+ param_list4 = [
+ {"device_ids": [Constants.device_id1], "device_service_tags": [Constants.service_tag1]},
+ {"device_service_tags": [Constants.device_id1], "device_group_names": ["group_name1"]},
+ {"device_ids": [Constants.device_id1], "device_group_names": ["group_name1"]},
+ {"device_ids": [Constants.device_id1], "device_service_tags": ["group_name1"]},
+ {"device_ids": [Constants.device_id1], "device_service_tags": [Constants.service_tag1],
+ "device_group_names": ["group_name1"]},
+ {"device_ids": [Constants.device_id1], "device_service_tags": [Constants.service_tag1],
+ "device_group_names": ["group_name1"], "baseline_name": "baseline1"
+ },
+ {"device_ids": [Constants.device_id1], "baseline_name": "baseline1"},
+ {"device_service_tags": [Constants.service_tag1], "baseline_name": "baseline1"},
+ {"device_group_names": ["group_name1"], "baseline_name": "baseline1"},
+ {"device_ids": [], "device_service_tags": [],
+ "device_group_names": [], "baseline_name": ""
+ },
+ ]
+
+ @pytest.mark.parametrize("param", param_list4)
+    def test_baseline_compliance_main_failure_case_02(self, param, ome_default_args, module_mock):
+        """fails when mutually exclusive identifier parameters are passed together"""
+ ome_default_args.update(param)
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert 'baseline_compliance_info' not in result
+ assert 'msg' in result
+ assert result["msg"] == "parameters are mutually exclusive: " \
+ "baseline_name|device_service_tags|device_ids|device_group_names"
+ assert result['failed'] is True
+
+    def test_baseline_compliance_main_failure_case_03(self, mocker, ome_default_args, module_mock, ome_response_mock,
+ ome_connection_mock_for_firmware_baseline_compliance_info):
+ """when ome response return value is None"""
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.validate_inputs')
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baselines_report_by_device_ids',
+ return_value=None)
+ ome_default_args.update({"device_ids": [Constants.device_id1]})
+ result = self._run_module(ome_default_args)
+ assert 'baseline_compliance_info' not in result
+ assert result['msg'] == "Unable to fetch the compliance baseline information."
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+    def test_baseline_compliance_main_exception_handling_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_firmware_baseline_compliance_info,
+ ome_response_mock):
+ ome_default_args.update({"device_service_tags": [Constants.service_tag1]})
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.validate_inputs')
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+
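+        # HTTPError and SSLValidationError are raised with full response-style arguments (URL, code, headers, body);
+        # the remaining exception types only take a plain message.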
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baselines_report_by_device_ids',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baselines_report_by_device_ids',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert 'baseline_compliance_info' not in result
+ assert 'msg' in result
+ assert result['failed'] is True
+ if exc_type == HTTPError:
+ assert 'error_info' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_info.py
new file mode 100644
index 00000000..6d394a1a
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_info.py
@@ -0,0 +1,136 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.1.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ssl import SSLError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_firmware_baseline_info
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestOmeFirmwareBaselineInfo(FakeAnsibleModule):
+ module = ome_firmware_baseline_info
+
+ @pytest.fixture
+ def ome_connection_ome_firmware_baseline_info_mock(self, mocker, ome_response_mock):
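+        """Patch RestOME so every invoke_request call returns the shared ome_response_mock fixture."""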
+ connection_class_mock = mocker.patch(
+ MODULE_PATH + 'ome_firmware_baseline_info.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+ def test_ome_firmware_baseline_info_main_success_case_01(self, mocker, ome_response_mock, ome_default_args,
+ module_mock,
+ ome_connection_ome_firmware_baseline_info_mock):
+ ome_response_mock.json_data = {"value": [{"baseline1": "data"}]}
+ result = self.execute_module(ome_default_args)
+ assert result["changed"] is False
+ assert 'baseline_info' in result
+ assert result['msg'] == "Successfully fetched firmware baseline information."
+ assert result['baseline_info'] == {"value": [{"baseline1": "data"}]}
+
+ def test_ome_firmware_baseline_info_main_success_case_02(self, mocker, ome_response_mock, ome_default_args,
+ module_mock,
+ ome_connection_ome_firmware_baseline_info_mock):
+ ome_response_mock.json_data = {"value": []}
+ result = self.execute_module(ome_default_args)
+ assert 'baseline_info' in result
+ assert result['baseline_info'] == []
+
+ def test_ome_firmware_baseline_info_main_success_case_03(self, mocker, ome_response_mock, ome_default_args,
+ module_mock,
+ ome_connection_ome_firmware_baseline_info_mock):
+ ome_default_args.update({"baseline_name": "baseline1"})
+ ome_response_mock.json_data = {"value": [{"Name": "baseline1", "data": "fake_data"}]}
+ mocker.patch(
+ MODULE_PATH + 'ome_firmware_baseline_info.get_specific_baseline',
+ return_value={"Name": "baseline1", "data": "fake_data"})
+ result = self.execute_module(ome_default_args)
+ assert result["changed"] is False
+ assert 'baseline_info' in result
+ assert result["baseline_info"] == {"Name": "baseline1", "data": "fake_data"}
+ assert result['msg'] == "Successfully fetched firmware baseline information."
+
+ def test_ome_firmware_baseline_info_main_success_case_04(self, mocker, ome_response_mock, ome_default_args,
+ module_mock,
+ ome_connection_ome_firmware_baseline_info_mock):
+ ome_default_args.update({"baseline_name": None})
+ ome_response_mock.json_data = {"value": []}
+ mocker.patch(
+ MODULE_PATH + 'ome_firmware_baseline_info.get_specific_baseline',
+ return_value={"baseline1": "fake_data"})
+ result = self.execute_module(ome_default_args)
+ assert result['baseline_info'] == []
+ assert result['msg'] == "No baselines present."
+
+ def test_ome_firmware_get_specific_baseline_case_01(self):
+ f_module = self.get_module_mock()
+ data = {"value": [{"Name": "baseline1", "data": "fakedata1"}, {"Name": "baseline2", "data": "fakedata2"}]}
+ val = self.module.get_specific_baseline(f_module, "baseline1", data)
+ assert val == {"Name": "baseline1", "data": "fakedata1"}
+
+ def test_ome_firmware_get_specific_baseline_case_02(self):
+ f_module = self.get_module_mock()
+ baseline_name = "baseline3"
+ msg = "Unable to complete the operation because the requested baseline with" \
+ " name '{0}' does not exist.".format(baseline_name)
+ data = {"value": [{"Name": "baseline1", "data": "fakedata1"}, {"Name": "baseline2", "data": "fakedata2"}]}
+ with pytest.raises(Exception) as exc:
+ self.module.get_specific_baseline(f_module, baseline_name, data)
+ assert exc.value.args[0] == msg
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_main_ome_firmware_baseline_info_failure_case1(self, exc_type, mocker, ome_default_args,
+ ome_connection_ome_firmware_baseline_info_mock,
+ ome_response_mock):
+ json_str = to_text(json.dumps({"info": "error_details"}))
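+        # URLError is reported as 'unreachable', HTTPError surfaces 'error_info', and other exceptions simply fail the module.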
+ if exc_type == URLError:
+ ome_connection_ome_firmware_baseline_info_mock.invoke_request.side_effect = exc_type("TESTS")
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ ome_connection_ome_firmware_baseline_info_mock.invoke_request.side_effect = exc_type("exception message")
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+            ome_connection_ome_firmware_baseline_info_mock.invoke_request.side_effect = exc_type(
+                'http://testhost.com', 400, 'http error message',
+                {"accept-type": "application/json"}, StringIO(json_str))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert "error_info" in result
+ assert result['msg'] == 'HTTP Error 400: http error message'
+
+            ome_connection_ome_firmware_baseline_info_mock.invoke_request.side_effect = exc_type(
+                'http://testhost.com', 404, '<404 not found>',
+                {"accept-type": "application/json"}, StringIO(json_str))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert "error_info" not in result
+ assert result["msg"] == "404 Not Found.The requested resource is not available."
+ assert 'baseline_info' not in result
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_catalog.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_catalog.py
new file mode 100644
index 00000000..c0f0a514
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_catalog.py
@@ -0,0 +1,864 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.3.0
+# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ssl import SSLError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_firmware_catalog
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+import json
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_catalog.'
+
+NO_CHANGES_MSG = "No changes found to be applied."
+INVALID_CATALOG_ID = "Invalid catalog ID provided."
+CATALOG_DEL_SUCCESS = "Successfully deleted the firmware catalog."
+CATALOG_BASELINE_ATTACHED = "Unable to delete as catalog is associated with baseline(s)."
+CATALOG_JOB_RUNNING = "Catalog job '{name}' with ID {id} is running.Retry after job completion."
+CHECK_MODE_CHANGE_FOUND_MSG = "Changes found to be applied."
+CHECK_MODE_CHANGE_NOT_FOUND_MSG = "No changes found to be applied."
+INVALID_CATALOG_ID = "Invalid catalog ID provided."
+CATALOG_DEL_SUCCESS = "Successfully deleted the firmware catalog(s)."
+CATALOG_BASELINE_ATTACHED = "Unable to delete the catalog as it is with baseline(s)."
+CATALOG_EXISTS = "The catalog with the name '{new_name}' already exists in the system."
+DELL_ONLINE_EXISTS = "Catalog with 'DELL_ONLINE' repository already exists with the name '{catalog_name}'."
+NAMES_ERROR = "Only delete operations accept multiple catalog names or IDs."
+CATALOG_ID_NOT_FOUND = "Catalog with ID '{catalog_id}' not found."
+CATALOG_NAME_NOT_FOUND = "Catalog '{catalog_name}' not found."
+CATALOG_UPDATED = "Successfully {operation} the firmware catalog."
+
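+# Sample payload mimicking the OME UpdateService/Catalogs collection response, reused across the catalog tests below.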
+catalog_info = {
+ "@odata.context": "/api/$metadata#Collection(UpdateService.Catalogs)",
+ "@odata.count": 3,
+ "value": [
+ {
+ "@odata.type": "#UpdateService.Catalogs",
+ "@odata.id": "/api/UpdateService/Catalogs(29)",
+ "Id": 29,
+ "Filename": "catalog.gz",
+ "SourcePath": "catalog/catalog.gz",
+ "Status": "Failed",
+ "TaskId": 21448,
+ "BaseLocation": None,
+ "Schedule": {
+ "StartTime": None,
+ "EndTime": None,
+ "Cron": "startnow"
+ },
+ "AssociatedBaselines": ["abc"],
+ "Repository": {
+ "@odata.type": "#UpdateService.Repository",
+ "Id": 19,
+ "Name": "catalog_http3",
+ "Description": "catalog desc3",
+ "Source": "downloads.dell.com",
+ "DomainName": None,
+ "Username": None,
+ "Password": None,
+ "CheckCertificate": False,
+ "RepositoryType": "HTTP"
+ }
+ },
+ {
+ "@odata.type": "#UpdateService.Catalogs",
+ "@odata.id": "/api/UpdateService/Catalogs(30)",
+ "Id": 30,
+ "Filename": "catalog.gz",
+ "SourcePath": "catalog/catalog.gz",
+ "Status": "Failed",
+ "BaseLocation": None,
+ "TaskId": 21449,
+ "Schedule": {
+ "StartTime": None,
+ "EndTime": None,
+ "Cron": "startnow"
+ },
+ "AssociatedBaselines": [],
+ "Repository": {
+ "@odata.type": "#UpdateService.Repository",
+ "Id": 20,
+ "Name": "catalog_http4",
+ "Description": "catalog desc4",
+ "Source": "downloads.dell.com",
+ "DomainName": None,
+ "Username": None,
+ "Password": None,
+ "CheckCertificate": False,
+ "RepositoryType": "HTTP"
+ }
+ },
+ {
+ "@odata.type": "#UpdateService.Catalogs",
+ "@odata.id": "/api/UpdateService/Catalogs(34)",
+ "Id": 34,
+ "Filename": "catalog.xml",
+ "SourcePath": "catalog/catalog.gz",
+ "Status": "Completed",
+ "TaskId": 21453,
+ "BaseLocation": "downloads.dell.com",
+ "Schedule": {
+ "StartTime": None,
+ "EndTime": None,
+ "Cron": "startnow"
+ },
+ "BundlesCount": 173,
+ "PredecessorIdentifier": "aaaaaa",
+ "AssociatedBaselines": [],
+ "Repository": {
+ "@odata.type": "#UpdateService.Repository",
+ "Id": 24,
+ "Name": "catalog_online2",
+ "Description": "catalog desc4",
+ "Source": "downloads.dell.com",
+ "DomainName": None,
+ "Username": None,
+ "Password": None,
+ "CheckCertificate": False,
+ "RepositoryType": "DELL_ONLINE"
+ }
+ }
+ ]
+}
+
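+# Single catalog entry (Id 34, DELL_ONLINE repository); matches the third entry of catalog_info above.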
+catalog_resp = {
+ "@odata.type": "#UpdateService.Catalogs",
+ "@odata.id": "/api/UpdateService/Catalogs(34)",
+ "Id": 34,
+ "Filename": "catalog.xml",
+ "SourcePath": "catalog/catalog.gz",
+ "Status": "Completed",
+ "TaskId": 21453,
+ "BaseLocation": "downloads.dell.com",
+ "Schedule": {
+ "StartTime": None,
+ "EndTime": None,
+ "Cron": "startnow"
+ },
+ "BundlesCount": 173,
+ "PredecessorIdentifier": "aaaaaa",
+ "AssociatedBaselines": [],
+ "Repository": {
+ "@odata.type": "#UpdateService.Repository",
+ "Id": 24,
+ "Name": "catalog_online2",
+ "Description": "catalog desc4",
+ "Source": "downloads.dell.com",
+ "DomainName": None,
+ "Username": None,
+ "Password": None,
+ "CheckCertificate": False,
+ "RepositoryType": "DELL_ONLINE"
+ }
+}
+
+
+@pytest.fixture
+def ome_connection_catalog_mock(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeFirmwareCatalog(FakeAnsibleModule):
+ module = ome_firmware_catalog
+
+ @pytest.fixture
+ def mock__get_catalog_payload(self, mocker):
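+        """Stub the module's _get_catalog_payload helper with a dummy payload so the test does not build a real one."""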
+ mock_payload = mocker.patch(
+ MODULE_PATH + '_get_catalog_payload',
+ return_value={"Repistory": "Dummy val"})
+ return mock_payload
+
+    def test_ome_catalog_firmware_main_ome_firmware_catalog_no_mandatory_arg_passed_failure_case(self, ome_default_args,
+ module_mock,
+ mock__get_catalog_payload,
+ ome_connection_catalog_mock):
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert 'catalog_status' not in result
+
+ inp_param1 = {"hostname": "host ip", "username": "username",
+ "password": "password", "port": 443, "catalog_name": ["catalog_name"]}
+ inp_param2 = {"hostname": "host ip", "username": "username",
+ "password": "password", "port": 443, "catalog_name": ["catalog_name"], "catalog_description": "desc",
+ "source": "10.255.2.128:2607", "source_path": "source_path", "file_name": "file_name",
+ "repository_type": "HTTPS",
+ "repository_username": "repository_username",
+ "repository_password": "repository_password",
+ "repository_domain": "repository_domain",
+ "check_certificate": True}
+ inp_param3 = {"hostname": "host ip", "username": "username",
+ "password": "password", "port": 443, "catalog_name": " ", "catalog_description": None}
+ inp_param4 = {"hostname": "host ip", "username": "username",
+ "password": "password", "port": 443, "catalog_name": ["catalog_name"], "catalog_description": "desc",
+ "source": "10.255.2.128:2607", "source_path": "source_path", "file_name": "file_name",
+ "repository_type": "DELL_ONLINE",
+ "repository_username": "repository_username",
+ "repository_password": "repository_password",
+ "repository_domain": "repository_domain",
+ "check_certificate": True}
+ inp_param5 = {"hostname": "host ip", "username": "username",
+ "password": "password", "port": 443, "catalog_name": ["catalog_name"], "catalog_description": "desc",
+ "source_path": "source_path", "file_name": "file_name",
+ "repository_type": "DELL_ONLINE",
+ "repository_username": "repository_username",
+ "repository_password": "repository_password",
+ "repository_domain": "repository_domain",
+ "check_certificate": True}
+ out1 = {"Repository": {"Name": "catalog_name"}}
+ out2 = {'Filename': 'file_name', 'SourcePath': 'source_path',
+ 'Repository': {'Name': 'catalog_name', 'Description': 'desc',
+ 'Source': '10.255.2.128:2607', 'RepositoryType': 'HTTPS', 'Username': 'repository_username',
+ 'Password': 'repository_password', 'DomainName': 'repository_domain',
+ 'CheckCertificate': True}}
+
+ out3 = {"Repository": {"Name": " "}}
+ out4 = {'Filename': 'file_name', 'SourcePath': 'source_path',
+ 'Repository': {'Name': 'catalog_name', 'Description': 'desc',
+ 'Source': '10.255.2.128:2607', 'RepositoryType': 'DELL_ONLINE',
+ 'CheckCertificate': True}}
+ out5 = {'Filename': 'file_name', 'SourcePath': 'source_path',
+ 'Repository': {'Name': 'catalog_name', 'Description': 'desc',
+ 'Source': 'downloads.dell.com', 'RepositoryType': 'DELL_ONLINE',
+ 'CheckCertificate': True}}
+
+ @pytest.mark.parametrize("params", [{"inp": inp_param1, "out": out1},
+ {"inp": inp_param2, "out": out2},
+ {"inp": inp_param3, "out": out3}
+ ])
+ def test_ome_catalog_firmware__get_catalog_payload_success_case(self, params):
+ payload = self.module._get_catalog_payload(params["inp"], params["inp"]["catalog_name"][0])
+ assert payload == params["out"]
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_catalog_firmware_ome_catalog_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_catalog_mock,
+ ome_response_mock):
+ ome_default_args.update({"state": "absent", "catalog_name": "t1"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'check_existing_catalog', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'check_existing_catalog', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'check_existing_catalog',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
+
+ @pytest.mark.parametrize("params", [{"state": "present", "catalog_name": ["catalog_online2"]},
+ {"state": "present", "catalog_id": [34]}])
+ def test_ome_catalog_firmware_check_existing_catalog_case01(self, params, ome_connection_catalog_mock):
+ ome_connection_catalog_mock.get_all_items_with_pagination.return_value = {"value": catalog_info["value"]}
+ f_module = self.get_module_mock(params=params)
+ catalog, all_catalog = self.module.check_existing_catalog(f_module, ome_connection_catalog_mock,
+ params["state"])
+ assert catalog[0] == {
+ "@odata.type": "#UpdateService.Catalogs",
+ "@odata.id": "/api/UpdateService/Catalogs(34)",
+ "Id": 34,
+ "Filename": "catalog.xml",
+ "SourcePath": "catalog/catalog.gz",
+ "Status": "Completed",
+ "TaskId": 21453,
+ "BaseLocation": "downloads.dell.com",
+ "Schedule": {
+ "StartTime": None,
+ "EndTime": None,
+ "Cron": "startnow"
+ },
+ "BundlesCount": 173,
+ "PredecessorIdentifier": "aaaaaa",
+ "AssociatedBaselines": [],
+ "Repository": {
+ "@odata.type": "#UpdateService.Repository",
+ "Id": 24,
+ "Name": "catalog_online2",
+ "Description": "catalog desc4",
+ "Source": "downloads.dell.com",
+ "DomainName": None,
+ "Username": None,
+ "Password": None,
+ "CheckCertificate": False,
+ "RepositoryType": "DELL_ONLINE"
+ }
+ }
+ assert all_catalog == {"catalog_online2": "DELL_ONLINE", "catalog_http4": "HTTP",
+ "catalog_http3": "HTTP"}
+
+ @pytest.mark.parametrize("params",
+ [{"state": "absent", "catalog_name": ["catalog_online2", "catalog_http4"]},
+ {"state": "absent", "catalog_id": [34, 30]}])
+ def test_ome_catalog_firmware_check_existing_catalog_case02(self, params, ome_connection_catalog_mock):
+ ome_connection_catalog_mock.get_all_items_with_pagination.return_value = {"value": catalog_info["value"]}
+ f_module = self.get_module_mock(params=params)
+ catalog, all_catalog = self.module.check_existing_catalog(f_module, ome_connection_catalog_mock,
+ params["state"])
+ assert catalog == [
+ {
+ "@odata.type": "#UpdateService.Catalogs",
+ "@odata.id": "/api/UpdateService/Catalogs(30)",
+ "Id": 30,
+ "Filename": "catalog.gz",
+ "SourcePath": "catalog/catalog.gz",
+ "Status": "Failed",
+ "BaseLocation": None,
+ "TaskId": 21449,
+ "Schedule": {
+ "StartTime": None,
+ "EndTime": None,
+ "Cron": "startnow"
+ },
+ "AssociatedBaselines": [],
+ "Repository": {
+ "@odata.type": "#UpdateService.Repository",
+ "Id": 20,
+ "Name": "catalog_http4",
+ "Description": "catalog desc4",
+ "Source": "downloads.dell.com",
+ "DomainName": None,
+ "Username": None,
+ "Password": None,
+ "CheckCertificate": False,
+ "RepositoryType": "HTTP"
+ }
+ },
+ {
+ "@odata.type": "#UpdateService.Catalogs",
+ "@odata.id": "/api/UpdateService/Catalogs(34)",
+ "Id": 34,
+ "Filename": "catalog.xml",
+ "SourcePath": "catalog/catalog.gz",
+ "Status": "Completed",
+ "TaskId": 21453,
+ "BaseLocation": "downloads.dell.com",
+ "Schedule": {
+ "StartTime": None,
+ "EndTime": None,
+ "Cron": "startnow"
+ },
+ "BundlesCount": 173,
+ "PredecessorIdentifier": "aaaaaa",
+ "AssociatedBaselines": [],
+ "Repository": {
+ "@odata.type": "#UpdateService.Repository",
+ "Id": 24,
+ "Name": "catalog_online2",
+ "Description": "catalog desc4",
+ "Source": "downloads.dell.com",
+ "DomainName": None,
+ "Username": None,
+ "Password": None,
+ "CheckCertificate": False,
+ "RepositoryType": "DELL_ONLINE"
+ }
+ }
+ ]
+ assert all_catalog == {}
+
+ @pytest.mark.parametrize("params", [{"state": "present", "catalog_name": ["catalog_online2"]}])
+ def test_ome_catalog_firmware_check_existing_catalog_case03(self, params, ome_connection_catalog_mock):
+ ome_connection_catalog_mock.get_all_items_with_pagination.return_value = {"value": catalog_info["value"]}
+ f_module = self.get_module_mock(params=params)
+ catalog, all_catalog = self.module.check_existing_catalog(f_module, ome_connection_catalog_mock,
+ params["state"],
+ "catalog_online2")
+ assert catalog[0] == {
+ "@odata.type": "#UpdateService.Catalogs",
+ "@odata.id": "/api/UpdateService/Catalogs(34)",
+ "Id": 34,
+ "Filename": "catalog.xml",
+ "SourcePath": "catalog/catalog.gz",
+ "Status": "Completed",
+ "TaskId": 21453,
+ "BaseLocation": "downloads.dell.com",
+ "Schedule": {
+ "StartTime": None,
+ "EndTime": None,
+ "Cron": "startnow"
+ },
+ "BundlesCount": 173,
+ "PredecessorIdentifier": "aaaaaa",
+ "AssociatedBaselines": [],
+ "Repository": {
+ "@odata.type": "#UpdateService.Repository",
+ "Id": 24,
+ "Name": "catalog_online2",
+ "Description": "catalog desc4",
+ "Source": "downloads.dell.com",
+ "DomainName": None,
+ "Username": None,
+ "Password": None,
+ "CheckCertificate": False,
+ "RepositoryType": "DELL_ONLINE"
+ }
+ }
+ assert all_catalog == {"catalog_online2": "DELL_ONLINE", "catalog_http4": "HTTP",
+ "catalog_http3": "HTTP"}
+
+ def test_ome_catalog_firmware_get_updated_catalog_info(self, ome_connection_catalog_mock):
+ resp = {
+ "@odata.type": "#UpdateService.Catalogs",
+ "@odata.id": "/api/UpdateService/Catalogs(34)",
+ "Id": 34,
+ "Filename": "catalog.xml",
+ "SourcePath": "catalog/catalog.gz",
+ "Status": "Completed",
+ "TaskId": 21453,
+ "BaseLocation": "downloads.dell.com",
+ "Schedule": {
+ "StartTime": None,
+ "EndTime": None,
+ "Cron": "startnow"
+ },
+ "BundlesCount": 173,
+ "PredecessorIdentifier": "aaaaaa",
+ "AssociatedBaselines": [],
+ "Repository": {
+ "@odata.type": "#UpdateService.Repository",
+ "Id": 24,
+ "Name": "catalog_online2",
+ "Description": "catalog desc4",
+ "Source": "downloads.dell.com",
+ "DomainName": None,
+ "Username": None,
+ "Password": None,
+ "CheckCertificate": False,
+ "RepositoryType": "DELL_ONLINE"
+ }
+ }
+ f_module = self.get_module_mock(params={"state": "present", "catalog_name": "catalog_online2"})
+ ome_connection_catalog_mock.get_all_items_with_pagination.return_value = {"value": catalog_info["value"]}
+ catalog = self.module.get_updated_catalog_info(f_module, ome_connection_catalog_mock, resp)
+ assert catalog == resp
+
+ @pytest.mark.parametrize("params",
+ [{"mparams": {"state": "present", "job_wait_timeout": 10, "job_wait": True,
+ "catalog_name": ["catalog_online2"]}}])
+ @pytest.mark.parametrize("action",
+ ["created", "modified"])
+ def test_ome_catalog_firmware_exit_catalog(self, mocker, ome_connection_catalog_mock, params, action):
+ ome_connection_catalog_mock.job_tracking.return_value = False, "message"
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
+ f_module = self.get_module_mock(params=params["mparams"])
+ mocker.patch(MODULE_PATH + 'get_updated_catalog_info', return_value=catalog_resp)
+ msg = CATALOG_UPDATED.format(operation=action)
+ with pytest.raises(Exception) as err:
+ self.module.exit_catalog(f_module, ome_connection_catalog_mock, catalog_resp, action, msg)
+ assert err.value.args[0] == msg
+
+ @pytest.mark.parametrize("params",
+ [{"mparams": {"state": "present", "job_wait_timeout": 10, "job_wait": False,
+ "catalog_name": ["catalog_online2"]}}])
+ @pytest.mark.parametrize("action",
+ ["created", "modified"])
+ def test_ome_catalog_firmware_exit_catalog2(self, mocker, ome_connection_catalog_mock, params, action):
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
+ f_module = self.get_module_mock(params=params["mparams"])
+ mocker.patch(MODULE_PATH + 'get_updated_catalog_info', return_value=catalog_resp)
+ msg = CATALOG_UPDATED.format(operation=action)
+ with pytest.raises(Exception) as err:
+ self.module.exit_catalog(f_module, ome_connection_catalog_mock, catalog_resp, action, msg)
+ assert err.value.args[0] == msg
+
+ def test_ome_catalog_firmware_validate_dell_online_case01(self):
+ all_catalog = {"catalog_online2": "DELL_ONLINE", "catalog_http4": "HTTP",
+ "catalog_http3": "HTTP"}
+ f_module = self.get_module_mock(params={"catalog_name": ["catalog_online2"]})
+ self.module.validate_dell_online(all_catalog, f_module)
+
+ def test_ome_catalog_firmware_validate_dell_online_case02(self):
+ all_catalog = {"catalog_http4": "HTTP",
+ "catalog_http3": "HTTP"}
+ f_module = self.get_module_mock(params={"catalog_name": ["catalog_online2"]})
+ self.module.validate_dell_online(all_catalog, f_module)
+
+ def test_ome_catalog_firmware_validate_dell_online_case03(self):
+ all_catalog = {"catalog_online3": "DELL_ONLINE", "catalog_http4": "HTTP",
+ "catalog_http3": "HTTP"}
+ f_module = self.get_module_mock(params={"catalog_name": ["catalog_online2"]})
+ with pytest.raises(Exception) as err:
+ self.module.validate_dell_online(all_catalog, f_module)
+ assert err.value.args[0] == DELL_ONLINE_EXISTS.format(catalog_name="catalog_online3")
+
+ def test_ome_catalog_firmware_create_catalog(self, mocker, ome_response_mock, ome_connection_catalog_mock):
+ f_module = self.get_module_mock(params={"catalog_name": ["catalog_name"]})
+ ome_response_mock.json_data = catalog_resp
+ mocker.patch(MODULE_PATH + 'exit_catalog', return_value=catalog_resp)
+ self.module.create_catalog(f_module, ome_connection_catalog_mock)
+
+ def test_ome_catalog_firmware_get_current_catalog_settings(self):
+ payload = self.module.get_current_catalog_settings(catalog_resp)
+ assert payload == {'Filename': 'catalog.xml', 'SourcePath': 'catalog/catalog.gz',
+ 'Repository': {'Name': 'catalog_online2', 'Id': 24, 'Description': 'catalog desc4',
+ 'RepositoryType': 'DELL_ONLINE', 'Source': 'downloads.dell.com',
+ 'CheckCertificate': False}}
+
+ def test_ome_catalog_firmware_modify_catalog_case01(self, mocker, ome_connection_catalog_mock):
+ f_module = self.get_module_mock(
+ params={"catalog_name": ["catalog_online2"], "new_catalog_name": "catalog_http3"})
+ modify_payload = {
+ "Id": 34,
+ "Filename": "catalog.xml",
+ "SourcePath": "catalog/catalog.gz",
+ "Repository": {
+ "Name": "catalog_online2",
+ "Description": "catalog desc4",
+ "CheckCertificate": False,
+ }
+ }
+ mocker.patch(MODULE_PATH + '_get_catalog_payload', return_value=modify_payload)
+ with pytest.raises(Exception) as err:
+ self.module.modify_catalog(f_module, ome_connection_catalog_mock, [catalog_resp],
+ {"catalog_online2": "DELL_ONLINE", "catalog_http4": "HTTP",
+ "catalog_http3": "HTTP"})
+ assert err.value.args[0] == CATALOG_EXISTS.format(new_name="catalog_http3")
+
+ def test_ome_catalog_firmware_modify_catalog_case02(self, mocker, ome_connection_catalog_mock):
+ f_module = self.get_module_mock(
+ params={"catalog_name": ["catalog_online2"], "new_catalog_name": "catalog_http10"})
+ modify_payload = {
+ "Id": 34,
+ "Filename": "catalog.xml",
+ "SourcePath": "catalog/catalog.gz",
+ "Repository": {
+ "Name": "catalog_online2",
+ "Description": "catalog desc4",
+ "CheckCertificate": False,
+ "RepositoryType": "NFS"
+ }
+ }
+ current_payload = {
+ "Id": 34,
+ "Filename": "catalog.xml",
+ "SourcePath": "catalog/catalog.gz",
+ "Repository": {
+ "Id": 11,
+ "Name": "catalog_online2",
+ "Description": "catalog desc4",
+ "CheckCertificate": False,
+ "RepositoryType": "DELL_ONLINE"
+ }
+ }
+ mocker.patch(MODULE_PATH + '_get_catalog_payload', return_value=modify_payload)
+ mocker.patch(MODULE_PATH + 'get_current_catalog_settings', return_value=current_payload)
+ with pytest.raises(Exception) as err:
+ self.module.modify_catalog(f_module, ome_connection_catalog_mock, [catalog_resp],
+ {"catalog_online2": "DELL_ONLINE", "catalog_http4": "HTTP",
+ "catalog_http3": "HTTP"})
+ assert err.value.args[0] == "Repository type cannot be changed to another repository type."
+
+ def test_ome_catalog_firmware_modify_catalog_case03(self, mocker, ome_connection_catalog_mock):
+ f_module = self.get_module_mock(
+ params={"catalog_name": ["catalog_online2"], "new_catalog_name": "catalog_http10"}, check_mode=True)
+ modify_payload = {
+ "Id": 34,
+ "Filename": "catalog.xml",
+ "SourcePath": "catalog/catalog.gz",
+ "Repository": {
+ "Name": "catalog_online2",
+ "Description": "catalog desc4",
+ "CheckCertificate": True,
+ "RepositoryType": "DELL_ONLINE"
+ }
+ }
+ mocker.patch(MODULE_PATH + '_get_catalog_payload', return_value=modify_payload)
+ with pytest.raises(Exception) as err:
+ self.module.modify_catalog(f_module, ome_connection_catalog_mock, [catalog_resp],
+ {"catalog_online2": "DELL_ONLINE", "catalog_http4": "HTTP",
+ "catalog_http3": "HTTP"})
+ assert err.value.args[0] == CHECK_MODE_CHANGE_FOUND_MSG
+
+ @pytest.mark.parametrize("check_mode", [True, False])
+ def test_ome_catalog_firmware_modify_catalog_case04(self, check_mode, mocker, ome_connection_catalog_mock):
+ f_module = self.get_module_mock(
+ params={"catalog_name": ["catalog_online2"], "new_catalog_name": "catalog_online2"}, check_mode=check_mode)
+ modify_payload = {
+ "Filename": "catalog.xml",
+ "SourcePath": "catalog/catalog.gz",
+ "Repository": {
+ "Name": "catalog_online2",
+ "Description": "catalog desc4",
+ "CheckCertificate": False,
+ "RepositoryType": "DELL_ONLINE"
+ }
+ }
+ current_payload = {
+ "Filename": "catalog.xml",
+ "SourcePath": "catalog/catalog.gz",
+ "Repository": {
+ "Id": 11,
+ "Name": "catalog_online2",
+ "Description": "catalog desc4",
+ "CheckCertificate": False,
+ "RepositoryType": "DELL_ONLINE"
+ }
+ }
+ mocker.patch(MODULE_PATH + '_get_catalog_payload', return_value=modify_payload)
+ mocker.patch(MODULE_PATH + 'get_current_catalog_settings', return_value=current_payload)
+ with pytest.raises(Exception) as err:
+ self.module.modify_catalog(f_module, ome_connection_catalog_mock, [catalog_resp],
+ {"catalog_online2": "DELL_ONLINE", "catalog_http4": "HTTP",
+ "catalog_http3": "HTTP"})
+ assert err.value.args[0] == CHECK_MODE_CHANGE_NOT_FOUND_MSG
+
+ def test_ome_catalog_firmware_modify_catalog_case05(self, mocker, ome_connection_catalog_mock, ome_response_mock):
+ f_module = self.get_module_mock(
+ params={"catalog_name": ["catalog_online2"], "new_catalog_name": "catalog_http10"}, check_mode=False)
+ modify_payload = {
+ "Id": 34,
+ "Filename": "catalog.xml",
+ "SourcePath": "catalog/catalog.gz",
+ "Repository": {
+ "Name": "catalog_online2",
+ "Description": "catalog desc4",
+ "CheckCertificate": False,
+ "RepositoryType": "DELL_ONLINE"
+ }
+ }
+ mocker.patch(MODULE_PATH + '_get_catalog_payload', return_value=modify_payload)
+ ome_response_mock.json_data = catalog_resp
+ mocker.patch(MODULE_PATH + 'exit_catalog', return_value=None)
+ self.module.modify_catalog(f_module, ome_connection_catalog_mock, [catalog_resp],
+ {"catalog_online2": "DELL_ONLINE", "catalog_http4": "HTTP",
+ "catalog_http3": "HTTP"})
+
+ def test_ome_catalog_firmware_validate_delete_operation_case1(self, ome_response_mock, ome_connection_catalog_mock):
+ f_module = self.get_module_mock(
+ params={"catalog_name": ["catalog_http3", "catalog_online2"]}, check_mode=False)
+ ome_response_mock.json_data = {
+ "@odata.context": "/api/$metadata#JobService.Job",
+ "@odata.type": "#JobService.Job",
+ "@odata.id": "/api/JobService/Jobs(10025)",
+ "Id": 10025,
+ "JobName": "Default Console Update Execution Task",
+ "JobDescription": "Default Console Update Execution Task",
+ "State": "Enabled",
+ "CreatedBy": "system",
+ "Targets": [],
+ "Params": [],
+ "LastRunStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2051,
+ "Name": "NotRun"
+ },
+ "JobType": {
+ "@odata.type": "#JobService.JobType",
+ "Id": 124,
+ "Name": "ConsoleUpdateExecution_Task",
+ "Internal": False
+ },
+ "JobStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2080,
+ "Name": "New"
+ },
+ }
+ with pytest.raises(Exception) as err:
+ self.module.validate_delete_operation(ome_connection_catalog_mock, f_module, catalog_info["value"], [1, 2])
+ assert err.value.args[0] == CATALOG_BASELINE_ATTACHED
+
+ def test_ome_catalog_firmware_validate_delete_operation_case2(self, ome_response_mock, ome_connection_catalog_mock):
+ f_module = self.get_module_mock(
+ params={"catalog_name": ["catalog_http3", "catalog_online2"]}, check_mode=True)
+ ome_response_mock.json_data = {
+ "@odata.context": "/api/$metadata#JobService.Job",
+ "@odata.type": "#JobService.Job",
+ "@odata.id": "/api/JobService/Jobs(10025)",
+ "Id": 10025,
+ "JobName": "Default Console Update Execution Task",
+ "JobDescription": "Default Console Update Execution Task",
+ "State": "Enabled",
+ "CreatedBy": "system",
+ "Targets": [],
+ "Params": [],
+ "LastRunStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2051,
+ "Name": "NotRun"
+ },
+ "JobType": {
+ "@odata.type": "#JobService.JobType",
+ "Id": 124,
+ "Name": "ConsoleUpdateExecution_Task",
+ "Internal": False
+ },
+ "JobStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2080,
+ "Name": "New"
+ },
+ }
+ catalog_info1 = [catalog_resp]
+ with pytest.raises(Exception) as err:
+ self.module.validate_delete_operation(ome_connection_catalog_mock, f_module, catalog_info1, [34])
+ assert err.value.args[0] == CHECK_MODE_CHANGE_FOUND_MSG
+
+ def test_ome_catalog_firmware_validate_delete_operation_case3(self, ome_response_mock, ome_connection_catalog_mock):
+ f_module = self.get_module_mock(
+ params={"catalog_name": ["catalog_http3", "catalog_online2"]}, check_mode=False)
+ ome_response_mock.json_data = {
+ "@odata.context": "/api/$metadata#JobService.Job",
+ "@odata.type": "#JobService.Job",
+ "@odata.id": "/api/JobService/Jobs(10025)",
+ "Id": 10025,
+ "JobName": "Default Console Update Execution Task",
+ "JobDescription": "Default Console Update Execution Task",
+ "State": "Enabled",
+ "CreatedBy": "system",
+ "Targets": [],
+ "Params": [],
+ "LastRunStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2051,
+ "Name": "NotRun"
+ },
+ "JobType": {
+ "@odata.type": "#JobService.JobType",
+ "Id": 124,
+ "Name": "ConsoleUpdateExecution_Task",
+ "Internal": False
+ },
+ "JobStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2080,
+ "Name": "New"
+ },
+ }
+ catalog_info1 = [catalog_resp]
+ self.module.validate_delete_operation(ome_connection_catalog_mock, f_module, catalog_info1, [34])
+
+ @pytest.mark.parametrize("params", [
+ {"fail_json": True, "json_data": {"JobId": 1234},
+ "check_existing_catalog": ([], []),
+ "mparams": {"state": "present", "job_wait_timeout": 10, "job_wait": False,
+ "catalog_id": 12, "repository_type": "DELL_ONLINE"},
+ 'message': INVALID_CATALOG_ID, "success": True
+ },
+ {"fail_json": False, "json_data": {"JobId": 1234},
+ "check_existing_catalog": ([], []), "check_mode": True,
+ "mparams": {"state": "present", "job_wait_timeout": 10, "job_wait": False,
+ "catalog_name": "c1", "repository_type": "HTTPS"},
+ 'message': CHECK_MODE_CHANGE_FOUND_MSG, "success": True
+ }
+ ])
+ def test_main(self, params, ome_connection_catalog_mock, ome_default_args, ome_response_mock, mocker):
+ mocker.patch(MODULE_PATH + 'check_existing_catalog', return_value=params.get("check_existing_catalog"))
+ # mocker.patch(MODULE_PATH + '_get_baseline_payload', return_value=params.get("_get_baseline_payload"))
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params.get("json_data")
+ ome_default_args.update(params.get('mparams'))
+ if params.get("fail_json", False):
+ result = self._run_module_with_fail_json(ome_default_args)
+ else:
+ result = self._run_module(ome_default_args, check_mode=params.get("check_mode", False))
+ assert result["msg"] == params['message']
+
+ @pytest.mark.parametrize("check_mode", [True, False])
+ def test_ome_catalog_firmware_validate_delete_operation_case4(self, check_mode, ome_response_mock,
+ ome_connection_catalog_mock):
+ f_module = self.get_module_mock(
+ params={"catalog_name": ["catalog_http3", "catalog_online2"]}, check_mode=check_mode)
+ with pytest.raises(Exception) as err:
+ self.module.validate_delete_operation(ome_connection_catalog_mock, f_module, [], [])
+ assert err.value.args[0] == CHECK_MODE_CHANGE_NOT_FOUND_MSG
+
+ def test_ome_catalog_firmware_delete_catalog(self, mocker, ome_connection_catalog_mock, ome_response_mock):
+ mocker.patch(MODULE_PATH + 'validate_delete_operation', return_value=None)
+ ome_response_mock.json_data = [1, 2]
+ f_module = self.get_module_mock(params={"state": "absent", "catalog_id": [1, 2]})
+ with pytest.raises(Exception) as err:
+ self.module.delete_catalog(f_module, ome_connection_catalog_mock, catalog_info["value"])
+ assert err.value.args[0] == CATALOG_DEL_SUCCESS
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_catalog_firmware_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_catalog_mock, ome_response_mock):
+ ome_default_args.update({"catalog_name": "catalog1", "repository_type": "HTTPS"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'validate_names', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'validate_names', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'validate_names',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
+
+ @pytest.mark.parametrize("param", [{"state": "absent", "catalog_id": [1, 2]},
+ {"state": "absent", "catalog_name": ["abc", "xyz"]}])
+ def test_ome_catalog_firmware_validate_names(self, param):
+ f_module = self.get_module_mock(params=param)
+ self.module.validate_names("absent", f_module)
+
+ @pytest.mark.parametrize("param", [{"state": "present", "catalog_id": [1, 2]},
+ {"state": "present", "catalog_name": ["abc", "xyz"]}])
+ def test_ome_catalog_firmware_validate_names_exception_case(self, param):
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as err:
+ self.module.validate_names("present", f_module)
+ assert err.value.args[0] == NAMES_ERROR
+
+ def test_ome_catalog_firmware_argument_exception_case1(self, ome_default_args):
+ ome_default_args.update({"catalog_name": "t1"})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "state is present but all of the following are missing: repository_type"
+
+ def test_ome_catalog_firmware_argument_exception_case2(self, ome_default_args):
+ ome_default_args.update({"catalog_id": 1})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "state is present but all of the following are missing: repository_type"
+
+ def test_ome_catalog_firmware_argument_exception_case3(self, ome_default_args):
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "one of the following is required: catalog_name, catalog_id"
+
+ def test_ome_catalog_firmware_argument_exception_case4(self, ome_default_args):
+ ome_default_args.update({"repository_type": "HTTPS", "catalog_name": "t1", "catalog_id": 1})
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result["msg"] == "parameters are mutually exclusive: catalog_name|catalog_id"
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_groups.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_groups.py
new file mode 100644
index 00000000..6aede932
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_groups.py
@@ -0,0 +1,274 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 3.5.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import pytest
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_groups
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
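+# Expected module messages and defaults used in the assertions below.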
+MULTIPLE_GROUPS_MSG = "Provide only one unique device group when state is present."
+NONEXIST_GROUP_ID = "A device group with the provided ID does not exist."
+NONEXIST_PARENT_ID = "A parent device group with the provided ID does not exist."
+INVALID_PARENT = "The provided parent device group is not a valid user-defined static device group."
+INVALID_GROUPS_DELETE = "Provide valid static device group(s) for deletion."
+INVALID_GROUPS_MODIFY = "Provide valid static device group for modification."
+PARENT_CREATION_FAILED = "Unable to create a parent device group with the name {pname}."
+PARENT_IN_SUBTREE = "The parent group is already under the provided group."
+CREATE_SUCCESS = "Successfully {op}d the device group."
+GROUP_PARENT_SAME = "Provided parent and the device group cannot be the same."
+DELETE_SUCCESS = "Successfully deleted the device group(s)."
+NO_CHANGES_MSG = "No changes found to be applied."
+CHANGES_FOUND = "Changes found to be applied."
+STATIC_ROOT = 'Static Groups'
+SETTLING_TIME = 2
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_groups.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_groups(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.ome_groups.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeGroups(FakeAnsibleModule):
+ module = ome_groups
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [{'Name': 'g1', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12}]},
+ 'message': DELETE_SUCCESS, "success": True, 'mparams': {'name': 'g1', 'state': 'absent'}},
+ {"json_data": {"value": [{'Name': 'g1', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12}]},
+ 'message': DELETE_SUCCESS, "success": True, 'mparams': {'name': 'g1', 'state': 'absent'}},
+ {"json_data": {"value": [{'Name': 'g1', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12}]},
+ 'message': CHANGES_FOUND, "success": True, 'mparams': {'group_id': 24, 'state': 'absent'}, 'check_mode': True},
+ {"json_data": {"value": [{'Name': 'g1', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12}]},
+ 'message': NO_CHANGES_MSG, "success": True, 'mparams': {'name': 'g2', 'state': 'absent'}},
+ {"json_data": {"value": [{'Name': 'g1', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12}]},
+ 'message': NO_CHANGES_MSG, "success": True, 'mparams': {'name': 'g2', 'state': 'absent'}, 'check_mode': True}])
+ def test_ome_groups_delete(self, params, ome_connection_mock_for_groups, ome_response_mock, ome_default_args,
+ module_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_connection_mock_for_groups.get_all_items_with_pagination.return_value = params['json_data']
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("params", [{"json_data": {
+ "value": [{'Name': 'g2', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': CREATE_SUCCESS, "success": True,
+ 'mparams': {'name': 'g1', 'parent_group_name': 'gp1', 'description': 'My group described'},
+ 'return_data': 22, 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 25, 'MembershipTypeId': 12}},
+ {"json_data": {'value': [{'Name': 'g2', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': CREATE_SUCCESS, "success": True,
+ 'mparams': {'name': 'g1', 'parent_group_name': 'gp21', 'description': 'My group described'}, 'return_data': 22,
+ 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 25, 'MembershipTypeId': 12}},
+ {"json_data": {'value': [{'Name': 'g2', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': CREATE_SUCCESS, "success": True,
+ 'mparams': {'name': 'g1', 'parent_group_id': 25, 'description': 'My group described'}, 'return_data': 22,
+ 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 25, 'MembershipTypeId': 12}},
+ {"json_data": {'value': [{'Name': 'g2', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': CREATE_SUCCESS, "success": True,
+ 'mparams': {'name': 'g1', 'parent_group_name': 'Static Groups', 'description': 'My group described'},
+ 'return_data': 22, 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 1, 'MembershipTypeId': 12}},
+ {"json_data": {'value': [{'Name': 'g2', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': CREATE_SUCCESS, "success": True,
+ 'mparams': {'name': 'g1', 'parent_group_id': 1, 'description': 'My group described'}, 'return_data': 22,
+ 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 1, 'MembershipTypeId': 12}},
+ {"json_data": {'value': [{'Name': 'g2', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': CHANGES_FOUND, "success": True,
+ 'mparams': {'name': 'g1', 'parent_group_name': 'gp21', 'description': 'My group described'}, 'return_data': 22,
+ 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 25, 'MembershipTypeId': 12}, 'check_mode': True}])
+ def test_ome_groups_create(self, params, ome_connection_mock_for_groups, ome_response_mock, ome_default_args,
+ module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['return_data']
+ ome_connection_mock_for_groups.get_all_items_with_pagination.return_value = params['json_data']
+ ome_connection_mock_for_groups.strip_substr_dict.return_value = params.get('created_group', {})
+ mocker.patch(MODULE_PATH + 'get_ome_group_by_id', return_value=params.get('created_group', {}))
+ mocker.patch(MODULE_PATH + 'create_parent', return_value=params['created_group'].get('ParentId'))
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == (params['message']).format(op='create')
+
+ @pytest.mark.parametrize("params", [{"json_data": {
+ 'value': [{'Name': 'g1', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12, 'description': 'My group described'},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': CREATE_SUCCESS, "success": True,
+ 'mparams': {'name': 'g1', 'new_name': 'j1', 'parent_group_name': 'gp1', 'description': 'description modified'},
+ 'return_data': 22, 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 25, 'MembershipTypeId': 12}}, {
+ "json_data": {'value': [{'Name': 'g1', 'Id': 24, 'TypeId': 3000, 'ParentId': 25, 'MembershipTypeId': 12,
+ 'description': 'My group described'},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': CHANGES_FOUND, "success": True,
+ 'mparams': {'name': 'g1', 'parent_group_name': 'gp1', 'description': 'description modified'}, 'return_data': 22,
+ 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 25, 'MembershipTypeId': 12}, 'check_mode': True}, {
+ "json_data": {'value': [{'Name': 'g1', 'Id': 24, 'TypeId': 3000, 'ParentId': 25, 'MembershipTypeId': 12,
+ 'Description': 'My group described'},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': NO_CHANGES_MSG, "success": True,
+ 'mparams': {'name': 'g1', 'new_name': 'g1', 'parent_group_name': 'gp1', 'description': 'My group described'},
+ 'return_data': 22, 'created_group': {'Name': 'g1', 'Id': 24, 'ParentId': 25, 'MembershipTypeId': 12},
+ 'check_mode': True}, ])
+ def test_ome_groups_modify(self, params, ome_connection_mock_for_groups, ome_response_mock, ome_default_args,
+ module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['return_data']
+ ome_connection_mock_for_groups.get_all_items_with_pagination.return_value = params['json_data']
+ ome_connection_mock_for_groups.strip_substr_dict.return_value = params.get('created_group', {})
+ mocker.patch(MODULE_PATH + 'get_ome_group_by_id', return_value=params.get('created_group', {}))
+ mocker.patch(MODULE_PATH + 'create_parent', return_value=params['created_group'].get('ParentId'))
+ # mocker.patch(MODULE_PATH + 'is_parent_in_subtree', return_value=False)
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == (params['message']).format(op='update')
+
+ @pytest.mark.parametrize("params", [{"json_data": {
+ 'value': [{'Name': 'g2', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'g3', 'Id': 12, 'TypeId': 2000, 'MembershipTypeId': 24},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': MULTIPLE_GROUPS_MSG, "success": True, 'mparams': {'name': ['g1', 'g3'], 'parent_group_name': 'gp1',
+ 'description': 'State present and multiple groups'},
+ 'return_data': 22, 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 25, 'MembershipTypeId': 12}}, {
+ "json_data": {'value': [{'Name': 'g2', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'g3', 'Id': 12, 'TypeId': 2000, 'MembershipTypeId': 24},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': NONEXIST_GROUP_ID, "success": True,
+ 'mparams': {'group_id': 13, 'parent_group_name': 'gp1', 'description': 'State present and no group_id'},
+ 'return_data': 22, 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 25, 'MembershipTypeId': 12}}, {
+ "json_data": {'value': [{'Name': 'g2', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'g3', 'Id': 12, 'TypeId': 2000, 'MembershipTypeId': 24},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': INVALID_PARENT, "success": True,
+ 'mparams': {'name': 'g1', 'parent_group_name': 'g3', 'description': 'State present and invalid parent'},
+ 'return_data': 22, 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 25, 'MembershipTypeId': 12}}, {
+ "json_data": {'value': [{'Name': 'g2', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'g3', 'Id': 12, 'TypeId': 2000, 'MembershipTypeId': 24},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': INVALID_GROUPS_DELETE, "success": True,
+ 'mparams': {'name': ['g1', 'g3'], 'state': 'absent', 'description': 'State absent and invalid group'},
+ 'return_data': 22, 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 25, 'MembershipTypeId': 12}}, {
+ "json_data": {'value': [{'Name': 'g2', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': NONEXIST_PARENT_ID, "success": True,
+ 'mparams': {'name': 'g1', 'parent_group_id': 26, 'description': 'create with non exist parent id'},
+ 'return_data': 22, 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 25, 'MembershipTypeId': 12}}, {
+ "json_data": {'value': [{'Name': 'g2', 'Id': 24, 'TypeId': 2000, 'MembershipTypeId': 24},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': INVALID_PARENT, "success": True,
+ 'mparams': {'name': 'g1', 'parent_group_id': 24, 'description': 'create with non exist parent id'},
+ 'return_data': 22, 'created_group': {'Name': 'g1', 'Id': 26, 'ParentId': 25, 'MembershipTypeId': 12}}, {
+ "json_data": {'value': [{'Name': 'g1', 'Id': 24, 'TypeId': 2000, 'ParentId': 25, 'MembershipTypeId': 24,
+ 'Description': 'My group described'},
+ {'Name': 'gp1', 'Id': 25, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': INVALID_GROUPS_MODIFY, "success": True,
+ 'mparams': {'name': 'g1', 'new_name': 'g1', 'parent_group_name': 'gp1', 'description': 'My group described'},
+ 'return_data': 22, 'created_group': {'Name': 'g1', 'Id': 24, 'ParentId': 25, 'MembershipTypeId': 12},
+ 'check_mode': True},
+ {"json_data": {'value': [{'Name': 'g1', 'Id': 24, 'TypeId': 3000, 'ParentId': 25, 'MembershipTypeId': 12,
+ 'Description': 'My group described'}, {'Name': 'gp1', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': GROUP_PARENT_SAME, "success": True,
+ 'mparams': {'name': 'g1', 'new_name': 'g1', 'parent_group_name': 'gp1', 'description': 'My group described'},
+ 'return_data': 22, 'created_group': {'Name': 'g1', 'Id': 24, 'ParentId': 25, 'MembershipTypeId': 12},
+ 'check_mode': True},
+ {"json_data": {'value': [{'Name': 'x1', 'Id': 24, 'TypeId': 3000, 'ParentId': 25, 'MembershipTypeId': 12,
+ 'Description': 'My group described'},
+ {'Name': 'g2', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12},
+ {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}]},
+ 'message': GROUP_PARENT_SAME, "success": True,
+ 'mparams': {'name': 'g1', 'parent_group_name': 'g1', 'description': 'My group described'},
+ 'return_data': 22, 'created_group': {'Name': 'g1', 'Id': 24, 'ParentId': 25, 'MembershipTypeId': 12},
+ 'check_mode': True}])
+ def test_ome_groups_fail_jsons(self, params, ome_connection_mock_for_groups, ome_response_mock, ome_default_args,
+ module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['return_data']
+ ome_connection_mock_for_groups.get_all_items_with_pagination.return_value = params['json_data']
+ ome_connection_mock_for_groups.strip_substr_dict.return_value = params.get('created_group', {})
+ mocker.patch(MODULE_PATH + 'get_ome_group_by_id', return_value=params.get('created_group', {}))
+ mocker.patch(MODULE_PATH + 'create_parent', return_value=params['created_group'].get('ParentId'))
+ ome_default_args.update(params['mparams'])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("params", [{"json_data": 12, "mparams": {'name': 'g1', 'parent_group_name': 'gp21'}}])
+ def test_create_parent(self, params, ome_connection_mock_for_groups, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params['mparams'])
+ static_root = {'Name': 'Static Groups', 'Id': 1, 'TypeId': 2000, 'MembershipTypeId': 12}
+ group_id = self.module.create_parent(ome_connection_mock_for_groups, f_module, static_root)
+ assert group_id == params['json_data']
+
+ @pytest.mark.parametrize("params",
+ [{"json_data": {'Name': 'g2', 'Id': 24, 'TypeId': 3000, 'MembershipTypeId': 12}}])
+ def test_get_ome_group_by_id(self, params, ome_connection_mock_for_groups, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ group = self.module.get_ome_group_by_id(ome_connection_mock_for_groups, 24)
+ assert group == params['json_data']
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_groups_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_groups, ome_response_mock):
+ ome_default_args.update({"state": "absent", "name": "t1"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'get_valid_groups', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'get_valid_groups', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'get_valid_groups',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_identity_pool.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_identity_pool.py
new file mode 100644
index 00000000..93c18d22
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_identity_pool.py
@@ -0,0 +1,1346 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.1.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_identity_pool
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils._text import to_text
+import json
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_identity_pool(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(
+ MODULE_PATH + 'ome_identity_pool.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOMeIdentityPool(FakeAnsibleModule):
+ module = ome_identity_pool
+
+ def test_main_ome_identity_pool_success_case1(self, mocker, ome_default_args,
+ ome_connection_mock_for_identity_pool, ome_response_mock):
+ sub_param = {"pool_name": "pool1",
+ "pool_description": "Identity pool with ethernet and fcoe settings",
+ "ethernet_settings": {
+ "starting_mac_address": "50-50-50-50-50-00",
+ "identity_count": 60},
+ "fcoe_settings": {
+ "starting_mac_address": "70-70-70-70-70-00",
+ "identity_count": 75
+ },
+ "iscsi_settings": {
+ "identity_count": 30,
+ "initiator_config": {
+ "iqn_prefix": "iqn.myprefix."
+ },
+ "initiator_ip_pool_settings": {
+ "gateway": "192.168.4.1",
+ "ip_range": "10.33.0.1-10.33.0.255",
+ "primary_dns_server": "10.8.8.8",
+ "secondary_dns_server": "8.8.8.8",
+ "subnet_mask": "255.255.255.0"
+ },
+ "starting_mac_address": "60:60:60:60:60:00"
+ },
+ "fc_settings": {
+ "identity_count": 45,
+ "starting_address": "10-10-10-10-10-10"
+ }
+ }
+ message_return = {"msg": "Successfully created an identity pool.",
+ "result": {"Id": 36, "IsSuccessful": True, "Issues": []}}
+ mocker.patch(MODULE_PATH + 'ome_identity_pool.pool_create_modify',
+ return_value=message_return)
+ ome_default_args.update(sub_param)
+ result = self.execute_module(ome_default_args)
+ assert result['changed'] is True
+ assert 'pool_status' in result and "msg" in result
+ assert result["msg"] == "Successfully created an identity pool."
+ assert result['pool_status'] == {
+ "Id": 36,
+ "IsSuccessful": True,
+ "Issues": []
+ }
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_main_ome_identity_pool_failure_case1(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_identity_pool, ome_response_mock):
+ ome_default_args.update({"pool_name": "pool1"})
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'ome_identity_pool.pool_create_modify',
+ side_effect=exc_type("ansible.module_utils.urls.open_url error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'ome_identity_pool.pool_create_modify',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'ome_identity_pool.pool_create_modify',
+ side_effect=exc_type('http://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'pool_status' not in result
+ assert 'msg' in result
+
+ def test_main_ome_identity_pool_no_mandatory_arg_passed_failure_case(self, ome_default_args,
+ ome_connection_mock_for_identity_pool):
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert 'pool_status' not in result
+
+ @pytest.mark.parametrize("param", [{"ethernet_settings": {"invalid_key": "value"}},
+ {"fcoe_settings": {"invalid_key": "value"}},
+ {"iscsi_settings": {"invalid_key": "value"}},
+ {"iscsi_settings": {"initiator_config": {"invalid_key": "value"}}},
+ {"iscsi_settings": {"initiator_ip_pool_settings": {"gateway1": "192.168.4.1"}}},
+ {"iscsi_settings": {
+ "initiator_ip_pool_settings": {"primary_dns_server": "192.168.4.1",
+ "ip_range1": "value"}}},
+ {"fc_settings": {"invalid_key": "value"}},
+ {"name": "name1"}])
+ def test_main_ome_identity_pool_invalid_settings(self, param, ome_default_args,
+ ome_connection_mock_for_identity_pool):
+ ome_default_args.update(param)
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert 'pool_status' not in result
+
+ @pytest.mark.parametrize("action", ["create", "modify"])
+ def test_get_success_message(self, action):
+ json_data = {
+ "Id": 36,
+ "IsSuccessful": True,
+ "Issues": []
+ }
+ message = self.module.get_success_message(action, json_data)
+ if action == "create":
+ assert message["msg"] == "Successfully created an identity pool."
+ else:
+ assert message["msg"] == "Successfully modified the identity pool."
+ assert message["result"] == {
+ "Id": 36,
+ "IsSuccessful": True,
+ "Issues": []
+ }
+
+ def test_pool_create_modify_success_case_01(self, mocker, ome_connection_mock_for_identity_pool, ome_response_mock):
+ params = {"pool_name": "pool_name"}
+ mocker.patch(
+ MODULE_PATH + 'ome_identity_pool.validate_modify_create_payload')
+ mocker.patch(
+ MODULE_PATH + 'ome_identity_pool.get_identity_pool_id_by_name',
+ return_value=(10, {"payload": "value"}))
+ mocker.patch(MODULE_PATH + 'ome_identity_pool.get_payload',
+ return_value={"Name": "name"})
+ mocker.patch(MODULE_PATH + 'ome_identity_pool.get_success_message',
+ return_value={"msg": "Successfully modified the identity pool"})
+ mocker.patch(
+ MODULE_PATH + 'ome_identity_pool.get_updated_modify_payload')
+ mocker.patch(MODULE_PATH + 'ome_identity_pool.compare_nested_dict',
+ return_value=False)
+ f_module = self.get_module_mock(params=params)
+ message = self.module.pool_create_modify(f_module, ome_connection_mock_for_identity_pool)
+ assert message == {"msg": "Successfully modified the identity pool"}
+
+ def test_pool_create_modify_success_case_02(self, mocker, ome_connection_mock_for_identity_pool, ome_response_mock):
+ params = {"pool_name": "pool_name"}
+ mocker.patch(
+ MODULE_PATH + 'ome_identity_pool.validate_modify_create_payload')
+ mocker.patch(
+ MODULE_PATH + 'ome_identity_pool.get_identity_pool_id_by_name',
+ return_value=(0, None))
+ mocker.patch(MODULE_PATH + 'ome_identity_pool.get_payload',
+ return_value={"Name": "name"})
+ mocker.patch(MODULE_PATH + 'ome_identity_pool.get_success_message',
+ return_value={"msg": "Successfully created an identity pool"})
+ f_module = self.get_module_mock(params=params)
+ message = self.module.pool_create_modify(f_module, ome_connection_mock_for_identity_pool)
+ assert message == {"msg": "Successfully created an identity pool"}
+
+ def test_pool_create_modify_success_case_03(self, mocker, ome_connection_mock_for_identity_pool, ome_response_mock):
+ params = {"pool_name": "pool_name"}
+ mocker.patch(
+ MODULE_PATH + 'ome_identity_pool.get_identity_pool_id_by_name',
+ return_value=(10, {"payload": "value"}))
+ mocker.patch(MODULE_PATH + 'ome_identity_pool.get_payload',
+ return_value={"Name": "pool1"})
+ mocker.patch(MODULE_PATH + 'ome_identity_pool.get_success_message',
+ return_value={"msg": "Successfully modified the identity pool"})
+ mocker.patch(
+ MODULE_PATH + 'ome_identity_pool.get_updated_modify_payload')
+ mocker.patch(MODULE_PATH + 'ome_identity_pool.compare_nested_dict',
+ return_value=True)
+ f_module = self.get_module_mock(params=params)
+ with pytest.raises(Exception) as exc:
+ self.module.pool_create_modify(f_module, ome_connection_mock_for_identity_pool)
+ assert exc.value.args[0] == "No changes are made to the specified pool name: pool1, as" \
+ " the entered values are the same as the current configuration."
+
+ def test_get_payload_create_case01(self):
+ params = {"pool_name": "pool1",
+ "pool_description": "Identity pool with ethernet and fcoe settings",
+ "ethernet_settings": {
+ "starting_mac_address": "50-50-50-50-50-00",
+ "identity_count": 60},
+ "fcoe_settings": {
+ "starting_mac_address": "70-70-70-70-70-00",
+ "identity_count": 75
+ }
+ }
+ f_module = self.get_module_mock(params=params)
+ payload = self.module.get_payload(f_module)
+ assert payload == {
+ "Name": "pool1",
+ "Description": "Identity pool with ethernet and fcoe settings",
+ "EthernetSettings": {"Mac": {
+ "StartingMacAddress": "UFBQUFAA",
+ "IdentityCount": 60}},
+ "FcoeSettings": {"Mac": {
+ "StartingMacAddress": "cHBwcHAA",
+ "IdentityCount": 75}},
+ }
+
+ def test_get_payload_create_case02(self):
+ """new_pool_name should be ignored for create action"""
+ params = {"pool_name": "pool1",
+ "new_pool_name": "pool2",
+ "pool_description": "Identity pool with ethernet and fcoe settings",
+ "ethernet_settings": {
+ "starting_mac_address": "50-50-50-50-50-00",
+ "identity_count": 60},
+ "fcoe_settings": {
+ "starting_mac_address": "70-70-70-70-70-00",
+ "identity_count": 75
+ }
+ }
+ f_module = self.get_module_mock(params=params)
+ payload = self.module.get_payload(f_module)
+ assert payload == {
+ "Name": "pool1",
+ "Description": "Identity pool with ethernet and fcoe settings",
+ "EthernetSettings": {"Mac": {
+ "StartingMacAddress": "UFBQUFAA",
+ "IdentityCount": 60}},
+ "FcoeSettings": {"Mac": {
+ "StartingMacAddress": "cHBwcHAA",
+ "IdentityCount": 75}},
+ }
+ assert payload["Name"] == "pool1"
+
+ def test_get_payload_create_case03(self):
+ """new_pool_name should be ignored for create action"""
+ params = {
+ "ethernet_settings": {
+ "identity_count": 60,
+ "starting_mac_address": "50:50:50:50:50:00"
+ },
+ "fc_settings": {
+ "identity_count": 45,
+ "starting_address": "10-10-10-10-10-10"
+ },
+ "fcoe_settings": {
+ "identity_count": 75,
+ "starting_mac_address": "aabb.ccdd.7070"
+ },
+ "hostname": "192.168.0.1",
+ "iscsi_settings": {
+ "identity_count": 30,
+ "initiator_config": {
+ "iqn_prefix": "iqn.myprefix."
+ },
+ "initiator_ip_pool_settings": {
+ "gateway": "192.168.4.1",
+ "ip_range": "10.33.0.1-10.33.0.255",
+ "primary_dns_server": "10.8.8.8",
+ "secondary_dns_server": "8.8.8.8",
+ "subnet_mask": "255.255.255.0"
+ },
+ "starting_mac_address": "60:60:60:60:60:00"
+ },
+ "new_pool_name": None,
+ "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
+ "pool_description": "Identity pool with Ethernet, FCoE, ISCSI and FC settings",
+ "pool_name": "pool1",
+ "port": 443,
+ "state": "present",
+ "username": "admin"
+ }
+ f_module = self.get_module_mock(params=params)
+ payload = self.module.get_payload(f_module)
+ assert payload == {
+ "Name": "pool1",
+ "Description": "Identity pool with Ethernet, FCoE, ISCSI and FC settings",
+ "EthernetSettings": {
+ "Mac": {
+ "IdentityCount": 60,
+ "StartingMacAddress": "UFBQUFAA"
+ }
+ },
+ "IscsiSettings": {
+ "Mac": {
+ "IdentityCount": 30,
+ "StartingMacAddress": "YGBgYGAA"
+ },
+ "InitiatorConfig": {
+ "IqnPrefix": "iqn.myprefix."
+ },
+ "InitiatorIpPoolSettings": {
+ "IpRange": "10.33.0.1-10.33.0.255",
+ "SubnetMask": "255.255.255.0",
+ "Gateway": "192.168.4.1",
+ "PrimaryDnsServer": "10.8.8.8",
+ "SecondaryDnsServer": "8.8.8.8"
+ }
+ },
+ "FcoeSettings": {
+ "Mac": {
+ "IdentityCount": 75,
+ "StartingMacAddress": "qrvM3XBw"
+ }
+ },
+ "FcSettings": {
+ "Wwnn": {
+ "IdentityCount": 45,
+ "StartingAddress": "IAAQEBAQEBA="
+ },
+ "Wwpn": {
+ "IdentityCount": 45,
+ "StartingAddress": "IAEQEBAQEBA="
+ }
+ }
+ }
+ assert payload["FcSettings"]["Wwnn"] == {"IdentityCount": 45, "StartingAddress": "IAAQEBAQEBA="}
+ assert payload["FcSettings"]["Wwpn"] == {"IdentityCount": 45, "StartingAddress": "IAEQEBAQEBA="}
+ assert payload["IscsiSettings"]["Mac"] == {"IdentityCount": 30, "StartingMacAddress": "YGBgYGAA"}
+ assert payload["IscsiSettings"]["InitiatorIpPoolSettings"] == {
+ "IpRange": "10.33.0.1-10.33.0.255",
+ "SubnetMask": "255.255.255.0",
+ "Gateway": "192.168.4.1",
+ "PrimaryDnsServer": "10.8.8.8",
+ "SecondaryDnsServer": "8.8.8.8"
+ }
+ assert payload["IscsiSettings"]["InitiatorConfig"] == {
+ "IqnPrefix": "iqn.myprefix."
+ }
+
+ @pytest.mark.parametrize("state", ["create", "modify"])
+ def test_get_payload_create_modify_case04(self, state):
+ """new_pool_name should be ignored for create action"""
+ params = {"pool_name": "pool3",
+ "new_pool_name": "pool4",
+ "pool_description": "Identity pool with iscsi",
+ "iscsi_settings": {
+ "identity_count": 30,
+ "initiator_config": {
+ "iqn_prefix": "iqn.myprefix."
+ },
+ "initiator_ip_pool_settings": {
+ "gateway": "192.168.4.1",
+ "ip_range": "20.33.0.1-20.33.0.255",
+ "primary_dns_server": "10.8.8.8",
+ "secondary_dns_server": "8.8.8.8",
+ "subnet_mask": "255.255.255.0"
+ },
+ "starting_mac_address": "10:10:10:10:10:00"
+ }
+ }
+ f_module = self.get_module_mock(params=params)
+ if state == "create":
+ payload = self.module.get_payload(f_module)
+ else:
+ payload = self.module.get_payload(f_module, 11)
+ assert "FcSettings" not in payload
+ assert "FcoeSettings" not in payload
+ assert payload["IscsiSettings"]["Mac"] == {"IdentityCount": 30, "StartingMacAddress": "EBAQEBAA"}
+ assert payload["IscsiSettings"]["InitiatorIpPoolSettings"] == {
+ "IpRange": "20.33.0.1-20.33.0.255",
+ "SubnetMask": "255.255.255.0",
+ "Gateway": "192.168.4.1",
+ "PrimaryDnsServer": "10.8.8.8",
+ "SecondaryDnsServer": "8.8.8.8"
+ }
+ assert payload["IscsiSettings"]["InitiatorConfig"] == {
+ "IqnPrefix": "iqn.myprefix."
+ }
+ if state == "create":
+ assert payload["Name"] == "pool3"
+ assert "Id" not in payload
+ else:
+ assert payload["Name"] == "pool4"
+ assert payload["Id"] == 11
+
+ @pytest.mark.parametrize("state", ["create", "modify"])
+ def test_get_payload_create_case05(self, state):
+ """new_pool_name should be ignored for create action and considered in modify"""
+ params = {"pool_name": "pool3",
+ "new_pool_name": "pool4",
+ "pool_description": "Identity pool with iscsi",
+ "fc_settings": {
+ "identity_count": 48,
+ "starting_address": "40:40:40:40:40:22"
+ }
+ }
+ f_module = self.get_module_mock(params=params)
+ if state == "create":
+ payload = self.module.get_payload(f_module)
+ else:
+ payload = self.module.get_payload(f_module, 11)
+ return_setting = {
+ "Name": "pool2",
+ "Description": "Identity pool with fc_settings",
+ "EthernetSettings": None,
+ "IscsiSettings": None,
+ "FcoeSettings": None,
+ "FcSettings": {
+ "Wwnn": {
+ "IdentityCount": 48,
+ "StartingAddress": "IABAQEBAQCI="
+ },
+ "Wwpn": {
+ "IdentityCount": 48,
+ "StartingAddress": "IAFAQEBAQCI="
+ }
+ }
+ }
+
+ assert payload["FcSettings"]["Wwnn"]["StartingAddress"] == "IABAQEBAQCI="
+ assert payload["FcSettings"]["Wwpn"]["StartingAddress"] == "IAFAQEBAQCI="
+ assert payload["FcSettings"]["Wwnn"]["IdentityCount"] == 48
+ assert payload["FcSettings"]["Wwpn"]["IdentityCount"] == 48
+ if state == "create":
+ assert payload["Name"] == "pool3"
+ assert "Id" not in payload
+ else:
+ assert payload["Name"] == "pool4"
+ assert payload["Id"] == 11
+
+ def test_get_payload_create_case06(self):
+ params = {"pool_name": "pool1",
+ "pool_description": "Identity pool with ethernet and fcoe settings",
+ "fcoe_settings": {"starting_mac_address": "70-70-70-70-70-00",
+ "identity_count": 75
+ }}
+ f_module = self.get_module_mock(params=params)
+ payload = self.module.get_payload(f_module)
+ assert payload["Name"] == "pool1"
+ assert "Id" not in payload
+ assert "FcoeSettings" in payload
+ assert "Ethernet_Settings" not in payload
+
+ @pytest.mark.parametrize("state", ["create", "modify"])
+ def test_get_payload_create_case07(self, state):
+ # case when new_pool_name is not passed
+ params = {"pool_name": "pool1",
+ "pool_description": "Identity pool with ethernet and fcoe settings"}
+ f_module = self.get_module_mock(params=params)
+ if state == "create":
+ payload = self.module.get_payload(f_module, None)
+ else:
+ payload = self.module.get_payload(f_module, 11)
+ assert payload["Name"] == "pool1"
+ if state == "modify":
+ assert "Id" in payload
+ else:
+ assert "Id" not in payload
+ assert "FcoeSettings" not in payload
+ assert "Ethernet_Settings" not in payload
+ assert "Ethernet_Settings" not in payload
+ assert "Ethernet_Settings" not in payload
+
+ def test_get_payload_modify_case01(self):
+ """moify action Name should be updated with ne_pool_name and Id has to be updated"""
+ params = {"pool_name": "pool1",
+ "new_pool_name": "pool2",
+ "pool_description": "Identity pool with ethernet and fcoe settings",
+ "ethernet_settings": {"starting_mac_address": "50-50-50-50-50-00",
+ "identity_count": 60},
+ "fcoe_settings": {
+ "starting_mac_address": "70-70-70-70-70-00",
+ "identity_count": 75
+ }
+ }
+ f_module = self.get_module_mock(params=params)
+ payload = self.module.get_payload(f_module, 10)
+ assert payload == {
+ "Id": 10,
+ "Name": "pool2",
+ "Description": "Identity pool with ethernet and fcoe settings",
+ "EthernetSettings": {"Mac": {
+ "StartingMacAddress": "UFBQUFAA",
+ "IdentityCount": 60}},
+ "FcoeSettings": {"Mac": {
+ "StartingMacAddress": "cHBwcHAA",
+ "IdentityCount": 75}},
+ }
+ assert payload["Name"] == "pool2"
+ assert payload["Id"] == 10
+
+ def test_get_payload_modify_case02(self):
+ """payload for only ethernet setting
+ if ne_ppol_name not passed payload Name should be updated with I(pool_name)
+ """
+ params = {"pool_name": "pool1",
+ "pool_description": "Identity pool with ethernet and fcoe settings",
+ "ethernet_settings": {"starting_mac_address": "50-50-50-50-50-00",
+ "identity_count": 60
+ }
+ }
+ f_module = self.get_module_mock(params=params)
+ payload = self.module.get_payload(f_module, 10)
+ assert payload["Name"] == "pool1"
+ assert payload["Id"] == 10
+ assert "FcoeSettings" not in payload
+ assert "EthernetSettings" in payload
+ assert payload == {'Description': 'Identity pool with ethernet and fcoe settings',
+ 'Name': 'pool1',
+ 'Id': 10,
+ 'EthernetSettings': {
+ 'Mac':
+ {'StartingMacAddress': 'UFBQUFAA', 'IdentityCount': 60
+ }
+ }
+ }
+
+ def test_get_payload_modify_case03(self):
+ params = {"pool_name": "pool1", "new_pool_name": "pool2"}
+ f_module = self.get_module_mock(params=params)
+ payload = self.module.get_payload(f_module, 11)
+ assert payload["Name"] == "pool2"
+ assert payload["Id"] == 11
+ assert "Description" not in payload
+ assert "FcoeSettings" not in payload
+ assert "Ethernet_Settings" not in payload
+
+ def test_get_payload_modify_case04(self):
+ """check case when I(new_pool_name) is empty string
+ ome is accepting it"""
+ params = {"pool_name": "pool1", "new_pool_name": ""}
+ f_module = self.get_module_mock(params=params)
+ payload = self.module.get_payload(f_module, 11)
+ assert payload["Name"] == ""
+ assert payload["Id"] == 11
+ assert "Description" not in payload
+ assert "FcoeSettings" not in payload
+ assert "Ethernet_Settings" not in payload
+
+ def test_update_mac_settings_case_01(self):
+ f_module = self.get_module_mock()
+ settings_params = {"starting_mac_address": "70-70-70-70-70-00", "identity_count": 10}
+ payload = {"Name": "pool1"}
+ self.module.update_mac_settings(payload, settings_params, "Ethernet_Settings", f_module)
+ assert payload == {
+ "Name": "pool1",
+ "Ethernet_Settings": {"Mac": {"StartingMacAddress": "cHBwcHAA", "IdentityCount": 10}}
+ }
+
+ def test_update_mac_settings_case_02(self):
+ f_module = self.get_module_mock()
+ settings_params = {"starting_mac_address": "70-70-70-70-70-xx", "identity_count": 10}
+ payload = {"Name": "pool1"}
+ with pytest.raises(Exception) as exc:
+ self.module.update_mac_settings(payload, settings_params, "EthernetSettings", f_module)
+ assert exc.value.args[0] == "Please provide the valid MAC address format for Ethernet settings."
+
+ def test_update_mac_settings_case_03(self):
+ """case when no sub settting exists"""
+ settings_params = {}
+ payload = {"Name": "pool1"}
+ f_module = self.get_module_mock()
+ self.module.update_mac_settings(payload, settings_params, "Ethernet_Settings", f_module)
+ assert payload == {
+ "Name": "pool1"
+ }
+
+ def test_get_identity_pool_id_by_name_exist_case(self, mocker, ome_connection_mock_for_identity_pool,
+ ome_response_mock):
+ pool_list = {"resp_obj": ome_response_mock, "report_list": [{"Name": "pool1", "Id": 10},
+ {"Name": "pool11", "Id": 11}]}
+ ome_connection_mock_for_identity_pool.get_all_report_details.return_value = pool_list
+ pool_id, attributes = self.module.get_identity_pool_id_by_name("pool1", ome_connection_mock_for_identity_pool)
+ assert pool_id == 10
+
+ def test_get_identity_pool_id_by_name_non_exist_case(self, mocker, ome_connection_mock_for_identity_pool,
+ ome_response_mock):
+ pool_list = {"resp_obj": ome_response_mock, "report_list": [{"Name": "pool2", "Id": 10}]}
+ ome_connection_mock_for_identity_pool.get_all_report_details.return_value = pool_list
+ pool_id, attributes = self.module.get_identity_pool_id_by_name("pool1", ome_connection_mock_for_identity_pool)
+ assert pool_id == 0 and attributes is None
+
+ def test_compare_payload_attributes_false_case_for_dummy_pool_setting(self):
+ """this put opeartion always gives success result without applying
+ changes because identity count is not passed as pat of it"""
+ modify_setting_payload = {'Name': 'pool4', 'EthernetSettings': {'Mac': {'StartingMacAddress': 'qrvM3e6q'}},
+ 'Id': 33}
+ existing_setting_payload = {
+ "@odata.context": "/api/$metadata#IdentityPoolService.IdentityPool",
+ "@odata.type": "#IdentityPoolService.IdentityPool",
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(33)",
+ "Id": 33,
+ "Name": "pool4",
+ "Description": None,
+ "CreatedBy": "admin",
+ "CreationTime": "2020-01-31 14:53:18.59163",
+ "LastUpdatedBy": "admin",
+ "LastUpdateTime": "2020-01-31 15:22:08.34596",
+ "EthernetSettings": None,
+ "IscsiSettings": None,
+ "FcoeSettings": None,
+ "FcSettings": None,
+ "UsageCounts": {
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(33)/UsageCounts"
+ },
+ "UsageIdentitySets@odata.navigationLink": "/api/IdentityPoolService/IdentityPools(33)/UsageIdentitySets"
+ }
+ val = self.module.compare_nested_dict(modify_setting_payload, existing_setting_payload)
+ assert val is False
+
+ @pytest.mark.parametrize("modify_payload",
+ [{"Description": "Identity pool with ethernet and fcoe settings2"}, {"Name": "pool2"},
+ {"EthernetSettings": {"Mac": {"IdentityCount": 61, "StartingMacAddress": "UFBQUFAA"}}},
+ {"EthernetSettings": {"Mac": {"IdentityCount": 60, "StartingMacAddress": "qrvM3e6q"}}},
+ {"FcoeSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "abcdfe"}}},
+ {"FcoeSettings": {"Mac": {"IdentityCount": 71, "StartingMacAddress": "cHBwcHAA"}}},
+ {"EthernetSettings": {"Mac": {"IdentityCount": 60, "StartingMacAddress": "cHBwcHAA"}},
+ "FcoeSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "qrvM3e6q"}}},
+ {"Description": "Identity pool with ethernet and fcoe settings2",
+ "EthernetSettings": {"Mac": {"IdentityCount": 60, "StartingMacAddress": "UFBQUFAA"}},
+ "FcoeSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "cHBwcHAA"}}}])
+ def test_compare_payload_attributes_case_false(self, modify_payload):
+ """case when chages are exists and payload can be used for modify opeartion"""
+ modify_setting_payload = modify_payload
+ existing_setting_payload = {
+ "@odata.context": "/api/$metadata#IdentityPoolService.IdentityPool",
+ "@odata.type": "#IdentityPoolService.IdentityPool",
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(23)",
+ "Id": 23,
+ "Name": "pool1",
+ "Description": "Identity pool with ethernet and fcoe settings1",
+ "CreatedBy": "admin",
+ "CreationTime": "2020-01-31 09:28:16.491424",
+ "LastUpdatedBy": "admin",
+ "LastUpdateTime": "2020-01-31 09:49:59.012549",
+ "EthernetSettings": {
+ "Mac": {
+ "IdentityCount": 60,
+ "StartingMacAddress": "UFBQUFAA"
+ }
+ },
+ "IscsiSettings": None,
+ "FcoeSettings": {
+ "Mac": {
+ "IdentityCount": 70,
+ "StartingMacAddress": "cHBwcHAA"
+ }
+ },
+ "FcSettings": None,
+ "UsageCounts": {
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(23)/UsageCounts"
+ },
+ "UsageIdentitySets@odata.navigationLink": "/api/IdentityPoolService/IdentityPools(23)/UsageIdentitySets"
+ }
+ val = self.module.compare_nested_dict(modify_setting_payload, existing_setting_payload)
+ assert val is False
+
+ @pytest.mark.parametrize("modify_payload", [
+ {"Name": "pool1", "EthernetSettings": {"Mac": {"StartingMacAddress": "qrvM3e6q"}}},
+ {"Name": "pool1", "EthernetSettings": {"Mac": {"IdentityCount": 70}}},
+ {"Name": "pool1", "EthernetSettings": {"Mac": {"StartingMacAddress": "qrvM3e6q"}}},
+ {"Name": "pool1", "EthernetSettings": {"Mac": {"StartingMacAddress": "qrvM3e6q"}},
+ "FcoeSettings": {"Mac": {"StartingMacAddress": "cHBwcHAA"}}},
+ {"EthernetSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "qrvM3e6q"}}},
+ {"Description": "Identity pool with ethernet setting"},
+ {"Name": "pool1"},
+ {"FcoeSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "cHBwcHAA"}}},
+ {"EthernetSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "qrvM3e6q"}},
+ "FcoeSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "cHBwcHAA"}}},
+ {"Description": "Identity pool with ethernet setting",
+ "EthernetSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "qrvM3e6q"}},
+ "FcoeSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "cHBwcHAA"}}}])
+ def test_compare_payload_attributes_case_true(self, modify_payload):
+ """setting values are same as existing payload and no need to apply the changes again"""
+ modify_setting_payload = modify_payload
+ existing_setting_payload = {
+ "@odata.context": "/api/$metadata#IdentityPoolService.IdentityPool",
+ "@odata.type": "#IdentityPoolService.IdentityPool",
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)",
+ "Id": 30,
+ "Name": "pool1",
+ "Description": "Identity pool with ethernet setting",
+ "CreatedBy": "admin",
+ "CreationTime": "2020-01-31 11:31:13.621182",
+ "LastUpdatedBy": "admin",
+ "LastUpdateTime": "2020-01-31 11:34:28.00876",
+ "EthernetSettings": {
+ "Mac": {
+ "IdentityCount": 70,
+ "StartingMacAddress": "qrvM3e6q"
+ }
+ },
+ "IscsiSettings": None,
+ "FcoeSettings": {
+ "Mac": {
+ "IdentityCount": 70,
+ "StartingMacAddress": "cHBwcHAA"
+ }
+ },
+ "FcSettings": None,
+ "UsageCounts": {
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)/UsageCounts"
+ },
+ "UsageIdentitySets@odata.navigationLink": "/api/IdentityPoolService/IdentityPools(30)/UsageIdentitySets"
+ }
+ val = self.module.compare_nested_dict(modify_setting_payload, existing_setting_payload)
+ assert val is True
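+ # The false/true cases above pin down the comparison semantics. A minimal sketch of
+ # what compare_nested_dict is expected to do here (an assumption added for readability,
+ # not the module's literal implementation):
+ #
+ #   def compare_nested_dict(modify, existing):
+ #       for key, value in modify.items():
+ #           if isinstance(value, dict):
+ #               if not compare_nested_dict(value, existing.get(key) or {}):
+ #                   return False
+ #           elif existing.get(key) != value:
+ #               return False
+ #       return True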
+
+ def test_get_updated_modify_payload_case_01(self):
+ """when setting not exists in current requested payload, update payload from existing setting value if exists"""
+ payload = {"Name": "pool1"}
+ existing_setting_payload = {
+ "@odata.context": "/api/$metadata#IdentityPoolService.IdentityPool",
+ "@odata.type": "#IdentityPoolService.IdentityPool",
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)",
+ "Id": 30,
+ "Name": "pool1",
+ "Description": "Identity pool with ethernet setting",
+ "CreatedBy": "admin",
+ "CreationTime": "2020-01-31 11:31:13.621182",
+ "LastUpdatedBy": "admin",
+ "LastUpdateTime": "2020-01-31 11:34:28.00876",
+ "EthernetSettings": {
+ "Mac": {
+ "IdentityCount": 70,
+ "StartingMacAddress": "qrvM3e6q"
+ }
+ },
+ "IscsiSettings": None,
+ "FcoeSettings": {
+ "Mac": {
+ "IdentityCount": 70,
+ "StartingMacAddress": "cHBwcHAA"
+ }
+ },
+ "FcSettings": None,
+ "UsageCounts": {
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)/UsageCounts"
+ },
+ "UsageIdentitySets@odata.navigationLink": "/api/IdentityPoolService/IdentityPools(30)/UsageIdentitySets"
+ }
+ payload = self.module.get_updated_modify_payload(payload, existing_setting_payload)
+ assert payload["Description"] == "Identity pool with ethernet setting"
+ assert payload["EthernetSettings"]["Mac"]["IdentityCount"] == 70
+ assert payload["EthernetSettings"]["Mac"]["StartingMacAddress"] == "qrvM3e6q"
+ assert payload["FcoeSettings"]["Mac"]["IdentityCount"] == 70
+ assert payload["FcoeSettings"]["Mac"]["StartingMacAddress"] == "cHBwcHAA"
+
+ def test_get_updated_modify_payload_case_02(self):
+ """when setting exists in current requested payload, do not
+ update payload from existing setting value if exists"""
+ payload = {"Name": "pool1", "EthernetSettings": {"Mac": {"IdentityCount": 55, "StartingMacAddress": "abcd"}},
+ "FcoeSettings": {"Mac": {"IdentityCount": 65, "StartingMacAddress": "xyz"}}}
+ existing_setting_payload = {
+ "@odata.context": "/api/$metadata#IdentityPoolService.IdentityPool",
+ "@odata.type": "#IdentityPoolService.IdentityPool",
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)",
+ "Id": 30,
+ "Name": "pool1",
+ "Description": "Identity pool with ethernet setting",
+ "CreatedBy": "admin",
+ "CreationTime": "2020-01-31 11:31:13.621182",
+ "LastUpdatedBy": "admin",
+ "LastUpdateTime": "2020-01-31 11:34:28.00876",
+ "EthernetSettings": {
+ "Mac": {
+ "IdentityCount": 70,
+ "StartingMacAddress": "qrvM3e6q"
+ }
+ },
+ "IscsiSettings": None,
+ "FcoeSettings": {
+ "Mac": {
+ "IdentityCount": 70,
+ "StartingMacAddress": "cHBwcHAA"
+ }
+ },
+ "FcSettings": None,
+ "UsageCounts": {
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)/UsageCounts"
+ },
+ "UsageIdentitySets@odata.navigationLink": "/api/IdentityPoolService/IdentityPools(30)/UsageIdentitySets"
+ }
+ payload = self.module.get_updated_modify_payload(payload, existing_setting_payload)
+ assert payload["Description"] == "Identity pool with ethernet setting"
+ assert payload["EthernetSettings"]["Mac"]["IdentityCount"] == 55
+ assert payload["EthernetSettings"]["Mac"]["StartingMacAddress"] == "abcd"
+ assert payload["FcoeSettings"]["Mac"]["IdentityCount"] == 65
+ assert payload["FcoeSettings"]["Mac"]["StartingMacAddress"] == "xyz"
+
+ def test_get_updated_modify_payload_case_03(self):
+ """update new description"""
+ payload = {"Name": "pool1", "Description": "new description"}
+ existing_setting_payload = {
+ "@odata.context": "/api/$metadata#IdentityPoolService.IdentityPool",
+ "@odata.type": "#IdentityPoolService.IdentityPool",
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)",
+ "Id": 30,
+ "Name": "pool1",
+ "Description": "Identity pool with ethernet setting",
+ "CreatedBy": "admin",
+ "CreationTime": "2020-01-31 11:31:13.621182",
+ "LastUpdatedBy": "admin",
+ "LastUpdateTime": "2020-01-31 11:34:28.00876",
+ "EthernetSettings": None,
+ "IscsiSettings": None,
+ "FcoeSettings": None,
+ "FcSettings": None,
+ "UsageCounts": {
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)/UsageCounts"
+ },
+ "UsageIdentitySets@odata.navigationLink": "/api/IdentityPoolService/IdentityPools(30)/UsageIdentitySets"
+ }
+ payload = self.module.get_updated_modify_payload(payload, existing_setting_payload)
+ assert payload["Description"] == "new description"
+ assert "EthernetSettings" not in payload
+ assert "FcoeSettings" not in payload
+
+ def test_get_updated_modify_payload_case_04(self):
+ """update remaining parameter of ethernet and fcoe setting
+ if not exists in payload but exists in existing setting payload"""
+ payload = {"Name": "pool1", "EthernetSettings": {"Mac": {"StartingMacAddress": "abcd"}},
+ "FcoeSettings": {"Mac": {"IdentityCount": 65}}}
+ existing_setting_payload = {
+ "@odata.context": "/api/$metadata#IdentityPoolService.IdentityPool",
+ "@odata.type": "#IdentityPoolService.IdentityPool",
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)",
+ "Id": 30,
+ "Name": "pool1",
+ "Description": "Identity pool with ethernet setting",
+ "CreatedBy": "admin",
+ "CreationTime": "2020-01-31 11:31:13.621182",
+ "LastUpdatedBy": "admin",
+ "LastUpdateTime": "2020-01-31 11:34:28.00876",
+ "EthernetSettings": {
+ "Mac": {
+ "IdentityCount": 70,
+ "StartingMacAddress": "qrvM3e6q"
+ }
+ },
+ "IscsiSettings": None,
+ "FcoeSettings": {
+ "Mac": {
+ "IdentityCount": 70,
+ "StartingMacAddress": "cHBwcHAA"
+ }
+ },
+ "FcSettings": None,
+ "UsageCounts": {
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)/UsageCounts"
+ },
+ "UsageIdentitySets@odata.navigationLink": "/api/IdentityPoolService/IdentityPools(30)/UsageIdentitySets"
+ }
+ payload = self.module.get_updated_modify_payload(payload, existing_setting_payload)
+ assert payload["Description"] == "Identity pool with ethernet setting"
+ assert payload["EthernetSettings"]["Mac"]["IdentityCount"] == 70
+ assert payload["EthernetSettings"]["Mac"]["StartingMacAddress"] == "abcd"
+ assert payload["FcoeSettings"]["Mac"]["IdentityCount"] == 65
+ assert payload["FcoeSettings"]["Mac"]["StartingMacAddress"] == "cHBwcHAA"
+
+ def test_get_updated_modify_payload_case_05(self):
+ """update remaining parameter of ethernet and fcoe setting will be null if not exists in existing payload"""
+ payload = {"Name": "pool1", "EthernetSettings": {"Mac": {"StartingMacAddress": "abcd"}}, }
+ existing_setting_payload = {"@odata.context": "/api/$metadata#IdentityPoolService.IdentityPool",
+ "@odata.type": "#IdentityPoolService.IdentityPool",
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)", "Id": 30,
+ "Name": "pool1",
+ "Description": "Identity pool with ethernet setting", "CreatedBy": "admin",
+ "CreationTime": "2020-01-31 11:31:13.621182",
+ "LastUpdatedBy": "admin", "LastUpdateTime": "2020-01-31 11:34:28.00876",
+ "EthernetSettings": {"Mac": {"StartingMacAddress": "qrvM3e6q"}},
+ "IscsiSettings": None,
+ "FcoeSettings": {"Mac": {"StartingMacAddress": "cHBwcHAA"}}, "FcSettings": None,
+ "UsageCounts": {
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)/UsageCounts"},
+ "UsageIdentitySets@odata.navigationLink": "/api/IdentityPoolService/IdentityPools(30)/UsageIdentitySets"}
+ payload = self.module.get_updated_modify_payload(payload, existing_setting_payload)
+ assert payload["Description"] == "Identity pool with ethernet setting"
+ assert payload["EthernetSettings"]["Mac"]["StartingMacAddress"] == "abcd"
+ assert "IdentityCount" not in payload["EthernetSettings"]["Mac"]
+
+ @pytest.mark.parametrize("setting", ["EthernetSettings", "FcoeSettings"])
+ def test_get_updated_modify_payload_case_06(self, setting):
+ modify_payload = {"Name": "pool1", "EthernetSettings": {"Mac": {"StartingMacAddress": "abcd"}}, }
+ existing_payload = {"@odata.context": "/api/$metadata#IdentityPoolService.IdentityPool",
+ "@odata.type": "#IdentityPoolService.IdentityPool",
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(35)",
+ "Id": 35, "Name": "pool1",
+ "Description": "Identity pool with ethernet and fcoe settings1",
+ "CreatedBy": "admin", "CreationTime": "2020-02-01 07:55:59.923838",
+ "LastUpdatedBy": "admin", "LastUpdateTime": "2020-02-01 07:55:59.923838",
+ "EthernetSettings": {"Mac": {"IdentityCount": 60, "StartingMacAddress": "UFBQUFAA"}},
+ "IscsiSettings": None,
+ "FcoeSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "cHBwcHAA"}},
+ "FcSettings": None,
+ "UsageCounts": {"@odata.id": "/api/IdentityPoolService/IdentityPools(35)/UsageCounts"},
+ "UsageIdentitySets@odata.navigationLink": "/api/IdentityPoolService/IdentityPools(35)/UsageIdentitySets"}
+ modify_payload = self.module.get_updated_modify_payload(modify_payload, existing_payload)
+ assert modify_payload["EthernetSettings"]["Mac"]["StartingMacAddress"] == "abcd"
+ assert modify_payload["EthernetSettings"]["Mac"]["IdentityCount"] == 60
+ assert modify_payload["FcoeSettings"]["Mac"]["StartingMacAddress"] == "cHBwcHAA"
+ assert modify_payload["FcoeSettings"]["Mac"]["IdentityCount"] == 70
+
+ @pytest.mark.parametrize("setting", ["EthernetSettings", "FcoeSettings"])
+ def test_update_modify_setting_case_success(self, setting):
+ modify_payload = {"Name": "pool1", "EthernetSettings": {"Mac": {"StartingMacAddress": "abcd"}},
+ "FcoeSettings": {"Mac": {"IdentityCount": 55}}}
+ existing_payload = {"@odata.context": "/api/$metadata#IdentityPoolService.IdentityPool",
+ "@odata.type": "#IdentityPoolService.IdentityPool",
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(35)",
+ "Id": 35, "Name": "pool1",
+ "Description": "Identity pool with ethernet and fcoe settings1",
+ "CreatedBy": "admin", "CreationTime": "2020-02-01 07:55:59.923838",
+ "LastUpdatedBy": "admin", "LastUpdateTime": "2020-02-01 07:55:59.923838",
+ "EthernetSettings": {"Mac": {"IdentityCount": 60, "StartingMacAddress": "UFBQUFAA"}},
+ "IscsiSettings": None,
+ "FcoeSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "cHBwcHAA"}},
+ "FcSettings": None,
+ "UsageCounts": {"@odata.id": "/api/IdentityPoolService/IdentityPools(35)/UsageCounts"},
+ "UsageIdentitySets@odata.navigationLink": "/api/IdentityPoolService/IdentityPools(35)/UsageIdentitySets"}
+ if setting == "EthernetSettings":
+ self.module.update_modify_setting(modify_payload, existing_payload, setting, ["Mac"])
+ assert modify_payload["EthernetSettings"]["Mac"]["StartingMacAddress"] == "abcd"
+ assert modify_payload["EthernetSettings"]["Mac"]["IdentityCount"] == 60
+ else:
+ self.module.update_modify_setting(modify_payload, existing_payload, setting, ["Mac"])
+ assert modify_payload["FcoeSettings"]["Mac"]["StartingMacAddress"] == "cHBwcHAA"
+ assert modify_payload["FcoeSettings"]["Mac"]["IdentityCount"] == 55
+
+ @pytest.mark.parametrize("mac_address",
+ ['50-50-50-50-50-50', '50:50:50:50:50:50', '5050.5050.5050', 'ab:cd:ef:70:80:70',
+ 'aabb.ccdd.7070'])
+ def test_mac_validation_match_case(self, mac_address):
+ """valid MAC address formats"""
+ match = self.module.mac_validation(mac_address)
+ assert match is not None
+
+ @pytest.mark.parametrize("mac_address", ['50--50--50--50--50-50',
+ '50::50::50::50::50::50',
+ '5050..5050..5050',
+ 'ab/cd/ef/70/80/70',
+ '50-50:50.50-50-50',
+ 'xy:gh:yk:lm:30:10',
+ '50-50-50-50-50',
+ '50-50-50-50-50-50-50-50'])
+ def test_mac_validation_no_match_case(self, mac_address):
+ """invalid MAC address formats"""
+ match = self.module.mac_validation(mac_address)
+ assert match is None
+
+ @pytest.mark.parametrize("mac_address_base64_map", [{'50-50-50-50-50-50': 'UFBQUFBQ'},
+ {'50:50:50:50:50:50': 'UFBQUFBQ'},
+ {'5050.5050.5050': 'UFBQUFBQ'},
+ {'ab:cd:ef:70:80:70': 'q83vcIBw'},
+ {'20-00-50-50-50-50-50-50': 'IABQUFBQUFA='},
+ {'20-01-50-50-50-50-50-50': 'IAFQUFBQUFA='},
+ {'20:00:50:50:50:50:50:50': 'IABQUFBQUFA='},
+ {'20:01:50:50:50:50:50:50': 'IAFQUFBQUFA='},
+ {'2000.5050.5050.5050': 'IABQUFBQUFA='},
+ {'2001.5050.5050.5050': 'IAFQUFBQUFA='},
+ {'20:00:ab:cd:ef:70:80:70': 'IACrze9wgHA='},
+ {'20:01:ab:cd:ef:70:80:70': 'IAGrze9wgHA='},
+ ])
+ def test_mac_to_base64_conversion(self, mac_address_base64_map):
+ f_module = self.get_module_mock()
+ mac_address = list(mac_address_base64_map.keys())[0]
+ base_64_val_expected = list(mac_address_base64_map.values())[0]
+ base_64_val = self.module.mac_to_base64_conversion(mac_address, f_module)
+ assert base_64_val == base_64_val_expected
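+ # The expected base64 strings above can be reproduced with the standard library; a
+ # small sketch of the encoding being verified (an illustration, not the module's helper):
+ #
+ #   import base64, re
+ #   raw = re.sub(r"[-:.]", "", "50-50-50-50-50-50")        # '505050505050'
+ #   base64.b64encode(bytes.fromhex(raw)).decode()          # -> 'UFBQUFBQ'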
+
+ def test_pool_delete_case_01(self, ome_connection_mock_for_identity_pool, mocker):
+ params = {"pool_name": "pool_name"}
+ mocker.patch(
+ MODULE_PATH + 'ome_identity_pool.get_identity_pool_id_by_name',
+ return_value=(1, {"value": "data"}))
+ f_module = self.get_module_mock(params=params)
+ message = self.module.pool_delete(f_module, ome_connection_mock_for_identity_pool)
+ assert message["msg"] == "Successfully deleted the identity pool."
+
+ def test_pool_delete_case_02(self, ome_connection_mock_for_identity_pool, mocker):
+ params = {"pool_name": "pool_name"}
+ mocker.patch(
+ MODULE_PATH + 'ome_identity_pool.get_identity_pool_id_by_name',
+ return_value=(0, {}))
+ f_module = self.get_module_mock(params=params)
+ with pytest.raises(Exception) as exc:
+ self.module.pool_delete(f_module, ome_connection_mock_for_identity_pool)
+ assert exc.value.args[0] == "The identity pool '{0}' is not present in the system.".format(params["pool_name"])
+
+ def test_pool_delete_error_case_02(self, mocker, ome_connection_mock_for_identity_pool, ome_response_mock):
+ msg = "exception message"
+ params = {"pool_name": "pool_name"}
+ mocker.patch(
+ MODULE_PATH + 'ome_identity_pool.get_identity_pool_id_by_name',
+ return_value=(1, "data"))
+ f_module = self.get_module_mock(params=params)
+ ome_connection_mock_for_identity_pool.invoke_request.side_effect = Exception(msg)
+ with pytest.raises(Exception, match=msg) as exc:
+ self.module.pool_delete(f_module, ome_connection_mock_for_identity_pool)
+
+ def test_main_ome_identity_pool_delete_success_case1(self, mocker, ome_default_args,
+ ome_connection_mock_for_identity_pool, ome_response_mock):
+ sub_param = {"pool_name": "pool1",
+ "state": "absent", }
+ message_return = {"msg": "Successfully deleted the identity pool."}
+ mocker.patch(MODULE_PATH + 'ome_identity_pool.pool_delete',
+ return_value=message_return)
+ ome_default_args.update(sub_param)
+ result = self.execute_module(ome_default_args)
+ assert 'pool_status' not in result
+ assert result["msg"] == "Successfully deleted the identity pool."
+
+ def test_validate_modify_create_payload_no_exception_case(self):
+ modify_payload = {
+ "Id": 59,
+ "Name": "pool_new",
+ "EthernetSettings": {
+ "Mac": {
+ "IdentityCount": 61,
+ "StartingMacAddress": "kJCQkJCQ"
+ }
+ },
+ "IscsiSettings": {
+ "Mac": {
+ "IdentityCount": 30,
+ "StartingMacAddress": "YGBgYGAA"
+ },
+ "InitiatorConfig": {
+ "IqnPrefix": "iqn.myprefix."
+ },
+ "InitiatorIpPoolSettings": {
+ "IpRange": "10.33.0.1-10.33.0.255",
+ "SubnetMask": "255.255.255.0",
+ "Gateway": "192.168.4.1",
+ "PrimaryDnsServer": "10.8.8.8",
+ "SecondaryDnsServer": "8.8.8.8"
+ }
+ },
+ "FcoeSettings": {
+ "Mac": {
+ "IdentityCount": 77,
+ "StartingMacAddress": "qrvM3VBQ"
+ }
+ },
+ "FcSettings": {
+ "Wwnn": {
+ "IdentityCount": 45,
+ "StartingAddress": "IAAQEBAQEBA="
+ },
+ "Wwpn": {
+ "IdentityCount": 45,
+ "StartingAddress": "IAEQEBAQEBA="
+ }
+ }
+ }
+ f_module = self.get_module_mock()
+ self.module.validate_modify_create_payload(modify_payload, f_module, "create")
+
+ modify_payload1 = {
+ "Mac": {
+ "IdentityCount": 61,
+ }
+ }
+ modify_payload2 = {
+ "Mac": {
+ "StartingMacAddress": "kJCQkJCQ"
+ }
+ }
+
+ modify_payload3 = {
+ "Mac": {
+ }
+ }
+
+ modify_payload4 = {
+ "Mac": None
+ }
+
+ @pytest.mark.parametrize("setting", ["EthernetSettings", "FcoeSettings"])
+ @pytest.mark.parametrize("action", ["create", "modify"])
+ @pytest.mark.parametrize("payload", [modify_payload1, modify_payload2, modify_payload3, modify_payload4])
+ def test_validate_modify_create_payload_failure_case1(self, payload, action, setting):
+ modify_payload = {"Id": 59, "Name": "pool_new"}
+ modify_payload[setting] = payload
+ f_module = self.get_module_mock()
+ msg = "Both starting MAC address and identity count is required to {0} an identity pool using {1} settings.".format(
+ action, ''.join(setting.split('Settings')))
+ with pytest.raises(Exception, match=msg) as exc:
+ self.module.validate_modify_create_payload(modify_payload, f_module, action)
+
+ modify_fc_setting1 = {"FcSettings": {
+ "Wwnn": {
+ "IdentityCount": 45,
+ },
+ "Wwpn": {
+ "IdentityCount": 45,
+ }
+ }}
+ modify_fc_setting2 = {"FcSettings": {
+ "Wwnn": {
+ "StartingAddress": "IAAQEBAQEBA="
+ },
+ "Wwpn": {
+ "IdentityCount": 45,
+ "StartingAddress": "IAEQEBAQEBA="
+ }
+ }}
+ modify_fc_setting3 = {"FcSettings": {
+ "Wwnn": {
+ "StartingAddress": "IAAQEBAQEBA="
+ },
+ "Wwpn": {
+ "StartingAddress": "IAEQEBAQEBA="
+ }
+ }}
+ modify_fc_setting4 = {"FcSettings": {
+ "Wwnn": {
+ },
+ "Wwpn": {
+ }
+ }}
+ modify_fc_setting5 = {"FcSettings": {
+ "Wwnn": None,
+ "Wwpn": None}}
+
+ @pytest.mark.parametrize("action", ["create", "modify"])
+ @pytest.mark.parametrize("modify_payload",
+ [modify_fc_setting1, modify_fc_setting2, modify_fc_setting3, modify_fc_setting4,
+ modify_fc_setting5])
+ def test_validate_modify_create_payload_failure_fc_setting_case(self, modify_payload, action):
+ payload = {"Id": 59, "Name": "pool_new"}
+ modify_payload.update(payload)
+ f_module = self.get_module_mock()
+ msg = "Both starting MAC address and identity count is required to {0} an identity pool using Fc settings.".format(
+ action)
+ with pytest.raises(Exception, match=msg) as exc:
+ self.module.validate_modify_create_payload(modify_payload, f_module, action)
+
+ @pytest.mark.parametrize("action", ["create", "modify"])
+ @pytest.mark.parametrize("modify_payload",
+ [modify_fc_setting1, modify_fc_setting2, modify_fc_setting3, modify_fc_setting4,
+ modify_fc_setting5])
+ # @pytest.mark.parametrize("modify_payload", [modify_fc_setting1])
+ def test_validate_modify_create_payload_failure_fc_setting_case(self, modify_payload, action):
+ payload = {"Id": 59, "Name": "pool_new"}
+ modify_payload.update(payload)
+ f_module = self.get_module_mock()
+ msg = "Both starting MAC address and identity count is required to {0} an identity pool using Fc settings.".format(
+ action)
+ with pytest.raises(Exception, match=msg) as exc:
+ self.module.validate_modify_create_payload(modify_payload, f_module, action)
+
+ payload_iscsi1 = {"IscsiSettings": {
+ "Mac": {
+ "IdentityCount": 30
+ }}}
+
+ payload_iscsi2 = {"IscsiSettings": {
+ "Mac": {
+ "StartingMacAddress": "kJCQkJCQ"
+ }}}
+ payload_iscsi3 = {"IscsiSettings": {
+ "Mac": {
+ }}}
+
+ @pytest.mark.parametrize("action", ["create", "modify"])
+ @pytest.mark.parametrize("modify_payload", [payload_iscsi1, payload_iscsi2, payload_iscsi3])
+ def test_validate_modify_create_payload_failure_iscsi_setting_case1(self, modify_payload, action):
+ payload = {"Id": 59, "Name": "pool_new"}
+ modify_payload.update(payload)
+ f_module = self.get_module_mock()
+ msg = "Both starting MAC address and identity count is required to {0} an identity pool using Iscsi settings.".format(
+ action)
+ with pytest.raises(Exception, match=msg) as exc:
+ self.module.validate_modify_create_payload(modify_payload, f_module, action)
+
+ payload_iscsi3 = {
+ "SubnetMask": "255.255.255.0",
+ "Gateway": "192.168.4.1",
+ "PrimaryDnsServer": "10.8.8.8",
+ "SecondaryDnsServer": "8.8.8.8"
+ }
+
+ payload_iscsi4 = {
+ "IpRange": "10.33.0.1-10.33.0.255",
+ "Gateway": "192.168.4.1",
+ "PrimaryDnsServer": "10.8.8.8",
+ "SecondaryDnsServer": "8.8.8.8"
+ }
+ payload_iscsi5 = {
+ "PrimaryDnsServer": "10.8.8.8",
+ "SecondaryDnsServer": "8.8.8.8"
+ }
+
+ @pytest.mark.parametrize("action", ["create", "modify"])
+ @pytest.mark.parametrize("initiatorip_payload",
+ [payload_iscsi3, payload_iscsi4, payload_iscsi5])
+ def test_validate_modify_create_payload_failure_iscsi_setting_case2(self, initiatorip_payload, action):
+ modify_payload = {"Id": 59, "Name": "pool_new",
+ "IscsiSettings": {"Mac": {
+ "IdentityCount": 30,
+ "StartingMacAddress": "kJCQkJCQ"
+ },
+ "InitiatorConfig": {"IqnPrefix": "abc"}},
+ }
+ modify_payload["IscsiSettings"]["InitiatorIpPoolSettings"] = initiatorip_payload
+ f_module = self.get_module_mock()
+ msg = "Both ip range and subnet mask in required to {0} an identity pool using iSCSI settings.".format(action)
+ with pytest.raises(Exception, match=msg):
+ self.module.validate_modify_create_payload(modify_payload, f_module, action)
+
+ def test_update_fc_settings_success_case1(self):
+ setting_params = {
+ "identity_count": 45,
+ "starting_address": "10-10-10-10-10-10"
+ }
+ payload = {"Name": "pool_name"}
+ f_module = self.get_module_mock()
+ self.module.update_fc_settings(payload, setting_params, "FcSettings", f_module)
+ assert payload == {
+ "Name": "pool_name",
+ 'FcSettings': {'Wwnn': {'IdentityCount': 45, 'StartingAddress': 'IAAQEBAQEBA='},
+ 'Wwpn': {'IdentityCount': 45, 'StartingAddress': 'IAEQEBAQEBA='}
+ }
+ }
+
+ def test_update_fc_settings_success_case2(self):
+ setting_params = {
+ "identity_count": 45
+ }
+ payload = {"Name": "pool_name"}
+ f_module = self.get_module_mock()
+ self.module.update_fc_settings(payload, setting_params, "FcSettings", f_module)
+ assert payload == {
+ "Name": "pool_name",
+ 'FcSettings': {'Wwnn': {'IdentityCount': 45},
+ 'Wwpn': {'IdentityCount': 45}}
+ }
+
+ def test_update_fc_settings_success_case3(self):
+ setting_params = {
+ "starting_address": "10-10-10-10-10-10"
+ }
+ payload = {"Name": "pool_name"}
+ f_module = self.get_module_mock()
+ self.module.update_fc_settings(payload, setting_params, "FcSettings", f_module)
+ assert payload == {
+ "Name": "pool_name",
+ 'FcSettings': {'Wwnn': {'StartingAddress': 'IAAQEBAQEBA='},
+ 'Wwpn': {'StartingAddress': 'IAEQEBAQEBA='}
+ }
+ }
+
+ def test_update_fc_settings_mac_failure_case1(self):
+ setting_params = {
+ "identity_count": 45,
+ "starting_address": "abcd.1010:1010"
+ }
+ payload = {"Name": "pool_name"}
+ setting_type = "FcSettings"
+ f_module = self.get_module_mock()
+ msg = "Please provide the valid starting address format for FC settings."
+ with pytest.raises(Exception, match=msg) as exc:
+ self.module.update_fc_settings(payload, setting_params, setting_type, f_module)
+
+ @pytest.mark.parametrize("mac", [{'50-50-50-50-50-50': ['20-00-', '20-01-']},
+ {'50:50:50:50:50:50': ['20:00:', '20:01:']},
+ {'5050.5050.5050': ['2000.', '2001.']},
+ {'ab:cd:ef:70:80:70': ['20:00:', '20:01:']},
+ {'aabb.ccdd.7070': ['2000.', '2001.']}])
+ def test_get_wwn_address(self, mac):
+ mac_address = list(mac.keys())[0]
+ expected_values = list(mac.values())[0]
+ wwnn_address_expected = expected_values[0]
+ wwpn_address_expected = expected_values[1]
+ wwnn_address, wwpn_address = self.module.get_wwn_address_prefix(mac_address)
+ assert wwnn_address == wwnn_address_expected
+ assert wwpn_address == wwpn_address_expected
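+ # The parametrized data encodes the WWN derivation checked here: the WWNN prefix
+ # '20-00-' (or '20:00:' / '2000.') and the WWPN prefix '20-01-' are prepended to the
+ # 6-octet MAC to build the 8-octet addresses that the FC payload tests encode, e.g.
+ #   import base64
+ #   base64.b64encode(bytes.fromhex("2000" + "101010101010")).decode()  # 'IAAQEBAQEBA='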
+
+ def test_update_iscsi_specific_settings_case1(self):
+ setting_type = "IscsiSettings"
+ payload = {"Name": "pool_new", setting_type: {"Mac": {"IdentityCount": 30, "StartingMacAddress": "YGBgYGAA"}}}
+ settings_params = {
+ "identity_count": 30,
+ "initiator_config": {
+ "iqn_prefix": "iqn.myprefix."
+ },
+ "initiator_ip_pool_settings": {
+ "gateway": "192.168.4.1",
+ "ip_range": "10.33.0.1-10.33.0.255",
+ "primary_dns_server": "10.8.8.8",
+ "secondary_dns_server": "8.8.8.8",
+ "subnet_mask": "255.255.255.0"
+ },
+ "starting_mac_address": "60:60:60:60:60:00"
+ }
+ self.module.update_iscsi_specific_settings(payload, settings_params, setting_type)
+ assert payload == {
+ "Name": "pool_new",
+ "IscsiSettings": {
+ "Mac": {
+ "IdentityCount": 30,
+ "StartingMacAddress": "YGBgYGAA"
+ },
+ "InitiatorConfig": {
+ "IqnPrefix": "iqn.myprefix."
+ },
+ "InitiatorIpPoolSettings": {
+ "IpRange": "10.33.0.1-10.33.0.255",
+ "SubnetMask": "255.255.255.0",
+ "Gateway": "192.168.4.1",
+ "PrimaryDnsServer": "10.8.8.8",
+ "SecondaryDnsServer": "8.8.8.8"
+ }
+ }}
+
+ def test_update_iscsi_specific_settings_case2(self):
+ setting_type = "IscsiSettings"
+ payload = {"Name": "pool_new", "Description": "description"}
+ settings_params = {
+ "initiator_ip_pool_settings": {
+ "gateway": "192.168.4.1",
+ "ip_range": "10.33.0.1-10.33.0.255",
+ "subnet_mask": "255.255.255.0"
+ }
+ }
+ self.module.update_iscsi_specific_settings(payload, settings_params, setting_type)
+ assert payload == {
+ "Name": "pool_new", "Description": "description",
+ "IscsiSettings": {
+ "InitiatorIpPoolSettings": {
+ "IpRange": "10.33.0.1-10.33.0.255",
+ "SubnetMask": "255.255.255.0",
+ "Gateway": "192.168.4.1"
+ }
+ }}
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_job_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_job_info.py
new file mode 100644
index 00000000..34de35d1
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_job_info.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 2.1.3
+# Copyright (C) 2019-2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_job_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+class TestOmeJobInfo(FakeAnsibleModule):
+ """Pyest class for ome_job_info module."""
+ module = ome_job_info
+
+ @pytest.fixture
+ def ome_connection_job_info_mock(self, mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_job_info.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+ @pytest.mark.parametrize("module_params,data", [({"system_query_options": {"filter": "abc"}}, "$filter")])
+ def test_get_query_parameters(self, module_params, data):
+ res = self.module._get_query_parameters(module_params)
+ if data is not None:
+ assert data in res
+ else:
+ assert res is None
+
+ def test_job_info_success_case(self, ome_default_args, ome_connection_job_info_mock,
+ ome_response_mock):
+ ome_response_mock.json_data = {"@odata.context": "/api/$metadata#Collection(JobService.Job)",
+ "@odata.count": 1}
+ ome_response_mock.success = True
+ job_details = {"resp_obj": ome_response_mock,
+ "report_list": [{"Name": "job1", "Id": 123}, {"Name": "job2", "Id": 124}]}
+ ome_connection_job_info_mock.get_all_report_details.return_value = job_details
+ result = self._run_module(ome_default_args)
+ assert 'job_info' in result
+ assert result['msg'] == "Successfully fetched the job info"
+
+ def test_job_info_main_success_case_job_id(self, ome_default_args, ome_connection_job_info_mock,
+ ome_response_mock):
+ ome_default_args.update({"job_id": 1})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [{"job_id": 1}]}
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'job_info' in result
+
+ def test_job_info_success_case03(self, ome_default_args, ome_connection_job_info_mock,
+ ome_response_mock):
+ ome_default_args.update({"system_query_options": {"filter": "abc"}})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [{"filter": "abc"}]}
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'job_info' in result
+
+ def test_job_info_failure_case(self, ome_default_args, ome_connection_job_info_mock,
+ ome_response_mock):
+ ome_response_mock.status_code = 500
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == "Failed to fetch the job info"
+
+ @pytest.mark.parametrize("exc_type", [URLError, HTTPError, SSLValidationError, ConnectionError,
+ TypeError, ValueError])
+ def test_job_info_main_exception_case(self, exc_type, mocker, ome_default_args, ome_connection_job_info_mock,
+ ome_response_mock):
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(
+ MODULE_PATH + 'ome_job_info._get_query_parameters',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(
+ MODULE_PATH + 'ome_job_info._get_query_parameters',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if not exc_type == URLError:
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(ome_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_port_breakout.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_port_breakout.py
new file mode 100644
index 00000000..44ceef4d
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_port_breakout.py
@@ -0,0 +1,243 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 3.0.0
+# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+import json
+import pytest
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_network_port_breakout
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \
+ AnsibleFailJSonException
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def ome_connection_breakout_mock(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch("{0}{1}".format(MODULE_PATH, "ome_network_port_breakout.RestOME"))
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOMEPortBreakout(FakeAnsibleModule):
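+    """Pytest class for ome_network_port_breakout module."""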
+ module = ome_network_port_breakout
+
+ def test_get_payload(self, ome_connection_breakout_mock, ome_response_mock, ome_default_args):
+ payload = self.module.get_breakout_payload("25017", "HardwareDefault", "2HB7NX2:phy-port1/1/11")
+ assert payload["JobName"] == "Breakout Port"
+
+ def test_check_mode(self, ome_connection_breakout_mock, ome_response_mock, ome_default_args):
+ f_module = self.get_module_mock(check_mode=True)
+ with pytest.raises(Exception) as exc:
+ self.module.check_mode(f_module, changes=True)
+ assert exc.value.args[0] == "Changes found to commit!"
+
+ def test_get_device_id(self, ome_connection_breakout_mock, ome_response_mock, ome_default_args):
+ f_module = self.get_module_mock(params={"target_port": "2HB7NX2:phy-port1/1/11", "breakout_type": "1X40GE"})
+ ome_response_mock.status_code = 200
+ ome_response_mock.json_data = {"value": [{"Id": 25017, "DeviceServiceTag": "2HB7NX2"}]}
+ result = self.module.get_device_id(f_module, ome_connection_breakout_mock)
+ assert result == 25017
+
+ def test_get_device_id_regex_failed(self, ome_connection_breakout_mock, ome_response_mock, ome_default_args):
+ f_module = self.get_module_mock(params={"target_port": "2HB7NX2:phy-:port1/1/11", "breakout_type": "1X40GE"})
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_id(f_module, ome_connection_breakout_mock)
+ assert exc.value.args[0] == "Invalid target port 2HB7NX2:phy-:port1/1/11."
+
+ def test_get_device_id_invalid_status(self, ome_connection_breakout_mock, ome_response_mock, ome_default_args):
+ f_module = self.get_module_mock(params={"target_port": "2HB7NX2:phy-port1/1/11", "breakout_type": "1X40GE"})
+ ome_response_mock.status_code = 200
+ ome_response_mock.json_data = {"value": []}
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_id(f_module, ome_connection_breakout_mock)
+ assert exc.value.args[0] == "Unable to retrieve the device information because the" \
+ " device with the entered service tag 2HB7NX2 is not present."
+
+ def test_get_port_information(self, ome_connection_breakout_mock, ome_response_mock, ome_default_args):
+ f_module = self.get_module_mock(params={"target_port": "2HB7NX2:phy-port1/1/11", "breakout_type": "1X40GE"})
+ ome_response_mock.json_data = {"InventoryInfo": [{"Configuration": "HardwareDefault",
+ "Id": "2HB7NX2:phy-port1/1/11",
+ "PortBreakoutCapabilities": [{"Type": "1X40GE"},
+ {"Type": "1X10GE"},
+ {"Type": "HardwareDefault"}]}]}
+ config, capability, interface = self.module.get_port_information(f_module, ome_connection_breakout_mock, 25017)
+ assert config == "HardwareDefault"
+
+ def test_get_port_information_failed(self, ome_connection_breakout_mock, ome_response_mock, ome_default_args):
+ f_module = self.get_module_mock(params={"target_port": "2HB7NX2:phy-port1/1/11", "breakout_type": "1X40GE"})
+ ome_response_mock.json_data = {"InventoryInfo": [{"Configuration": "NoBreakout",
+ "Id": "2HB7NX2:phy-port1/1/11",
+ "PortBreakoutCapabilities": [{"Type": "1X40GE"},
+ {"Type": "1X10GE"},
+ {"Type": "HardwareDefault"}]}]}
+ with pytest.raises(Exception) as exc:
+ self.module.get_port_information(f_module, ome_connection_breakout_mock, 25017)
+ assert exc.value.args[0] == "2HB7NX2:phy-port1/1/11 does not support port breakout" \
+ " or invalid port number entered."
+
+ def test_set_breakout_port(self, ome_connection_breakout_mock, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"target_port": "2HB7NX2:phy-port1/1/11", "breakout_type": "1X40GE"})
+ capability = [{"Type": "1X40GE"}, {"Type": "1X10GE"}, {"Type": "HardwareDefault"}]
+ payload = {
+ "Id": 0, "JobName": "Breakout Port", "JobDescription": "",
+ "Schedule": "startnow", "State": "Enabled",
+ "JobType": {"Id": 3, "Name": "DeviceAction_Task"},
+ "Params": [
+ {"Key": "breakoutType", "Value": "1X40GE"},
+ {"Key": "interfaceId", "Value": "2HB7NX2:phy-port1/1/11"},
+ {"Key": "operationName", "Value": "CONFIGURE_PORT_BREAK_OUT"}],
+ "Targets": [
+ {"JobId": 0, "Id": 25017, "Data": "", "TargetType": {"Id": 4000, "Name": "DEVICE"}}
+ ]}
+ mocker.patch("{0}{1}".format(MODULE_PATH, "ome_network_port_breakout.get_breakout_payload"),
+ return_value=payload)
+ ome_response_mock.status_code = 200
+ result = self.module.set_breakout(f_module, ome_connection_breakout_mock, "HardwareDefault",
+ capability, "2HB7NX2:phy-port1/1/11", 25017)
+ assert result.status_code == 200
+
+ def test_set_breakout_port_invalid(self, ome_connection_breakout_mock, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"target_port": "2HB7NX2:phy-port1/1/11", "breakout_type": "1X100GE"})
+ capability = [{"Type": "1X40GE"}, {"Type": "1X10GE"}, {"Type": "HardwareDefault"}]
+ payload = {
+ "Id": 0, "JobName": "Breakout Port", "JobDescription": "",
+ "Schedule": "startnow", "State": "Enabled",
+ "JobType": {"Id": 3, "Name": "DeviceAction_Task"},
+ "Params": [
+ {"Key": "breakoutType", "Value": "1X40GE"},
+ {"Key": "interfaceId", "Value": "2HB7NX2:phy-port1/1/11"},
+ {"Key": "operationName", "Value": "CONFIGURE_PORT_BREAK_OUT"}],
+ "Targets": [
+ {"JobId": 0, "Id": 25017, "Data": "", "TargetType": {"Id": 4000, "Name": "DEVICE"}}
+ ]}
+ mocker.patch("{0}{1}".format(MODULE_PATH, "ome_network_port_breakout.get_breakout_payload"),
+ return_value=payload)
+ with pytest.raises(Exception) as exc:
+ self.module.set_breakout(f_module, ome_connection_breakout_mock, "HardwareDefault",
+ capability, "2HB7NX2:phy-port1/1/11", 25017)
+ assert exc.value.args[0] == "Invalid breakout type: 1X100GE, supported values are 1X40GE, " \
+ "1X10GE, HardwareDefault."
+
+ def test_set_breakout_port_reset(self, ome_connection_breakout_mock, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"target_port": "2HB7NX2:phy-port1/1/11",
+ "breakout_type": "HardwareDefault"})
+ capability = [{"Type": "1X40GE"}, {"Type": "1X10GE"}, {"Type": "HardwareDefault"}]
+ payload = {
+ "Id": 0, "JobName": "Breakout Port", "JobDescription": "",
+ "Schedule": "startnow", "State": "Enabled",
+ "JobType": {"Id": 3, "Name": "DeviceAction_Task"},
+ "Params": [
+ {"Key": "breakoutType", "Value": "1X40GE"},
+ {"Key": "interfaceId", "Value": "2HB7NX2:phy-port1/1/11"},
+ {"Key": "operationName", "Value": "CONFIGURE_PORT_BREAK_OUT"}],
+ "Targets": [
+ {"JobId": 0, "Id": 25017, "Data": "", "TargetType": {"Id": 4000, "Name": "DEVICE"}}
+ ]}
+ mocker.patch("{0}{1}".format(MODULE_PATH, "ome_network_port_breakout.get_breakout_payload"),
+ return_value=payload)
+ ome_response_mock.status_code = 200
+ result = self.module.set_breakout(f_module, ome_connection_breakout_mock, "1X40GE",
+ capability, "2HB7NX2:phy-port1/1/11", 25017)
+ assert result.status_code == 200
+
+ def test_set_breakout_port_symmetry(self, ome_connection_breakout_mock, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"target_port": "2HB7NX2:phy-port1/1/11",
+ "breakout_type": "1X40GE"})
+ capability = [{"Type": "1X40GE"}, {"Type": "1X10GE"}, {"Type": "HardwareDefault"}]
+ payload = {
+ "Id": 0, "JobName": "Breakout Port", "JobDescription": "",
+ "Schedule": "startnow", "State": "Enabled",
+ "JobType": {"Id": 3, "Name": "DeviceAction_Task"},
+ "Params": [
+ {"Key": "breakoutType", "Value": "1X40GE"},
+ {"Key": "interfaceId", "Value": "2HB7NX2:phy-port1/1/11"},
+ {"Key": "operationName", "Value": "CONFIGURE_PORT_BREAK_OUT"}],
+ "Targets": [
+ {"JobId": 0, "Id": 25017, "Data": "", "TargetType": {"Id": 4000, "Name": "DEVICE"}}
+ ]}
+ mocker.patch("{0}{1}".format(MODULE_PATH, "ome_network_port_breakout.get_breakout_payload"),
+ return_value=payload)
+ with pytest.raises(Exception) as exc:
+ self.module.set_breakout(f_module, ome_connection_breakout_mock, "1X40GE",
+ capability, "2HB7NX2:phy-port1/1/11", 25017)
+ assert exc.value.args[0] == "The port is already configured with the selected breakout configuration."
+
+ def test_set_breakout_port_asymmetry(self, ome_connection_breakout_mock, ome_response_mock, ome_default_args, mocker):
+ f_module = self.get_module_mock(params={"target_port": "2HB7NX2:phy-port1/1/11", "breakout_type": "1X20GE"})
+ capability = [{"Type": "1X40GE"}, {"Type": "1X10GE"}, {"Type": "HardwareDefault"}]
+ payload = {
+ "Id": 0, "JobName": "Breakout Port", "JobDescription": "",
+ "Schedule": "startnow", "State": "Enabled",
+ "JobType": {"Id": 3, "Name": "DeviceAction_Task"},
+ "Params": [
+ {"Key": "breakoutType", "Value": "1X40GE"},
+ {"Key": "interfaceId", "Value": "2HB7NX2:phy-port1/1/11"},
+ {"Key": "operationName", "Value": "CONFIGURE_PORT_BREAK_OUT"}],
+ "Targets": [
+ {"JobId": 0, "Id": 25017, "Data": "", "TargetType": {"Id": 4000, "Name": "DEVICE"}}
+ ]}
+ mocker.patch("{0}{1}".format(MODULE_PATH, "ome_network_port_breakout.get_breakout_payload"),
+ return_value=payload)
+ with pytest.raises(Exception) as exc:
+ self.module.set_breakout(f_module, ome_connection_breakout_mock, "1X40GE",
+ capability, "2HB7NX2:phy-port1/1/11", 25017)
+ assert exc.value.args[0] == "Device does not support changing a port breakout" \
+ " configuration to different breakout type. Configure the port to" \
+ " HardwareDefault and retry the operation."
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_main_exception(self, exc_type, mocker, ome_default_args, ome_connection_breakout_mock, ome_response_mock):
+ ome_default_args.update({"target_port": "2HB7NX2:phy-port1/1/11", "breakout_type": "1X20GE"})
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ if exc_type not in [HTTPError, SSLValidationError]:
+ ome_connection_breakout_mock.invoke_request.side_effect = exc_type('test')
+ else:
+ ome_connection_breakout_mock.invoke_request.side_effect = exc_type('http://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ if not exc_type == URLError:
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch("{0}{1}".format(MODULE_PATH, "ome_network_port_breakout.get_breakout_payload"),
+ return_value={})
+ mocker.patch("{0}{1}".format(MODULE_PATH, "ome_network_port_breakout.get_port_information"),
+ return_value=(None, None, None))
+ mocker.patch("{0}{1}".format(MODULE_PATH, "ome_network_port_breakout.set_breakout"),
+ return_value={})
+ result = self._run_module(ome_default_args)
+ assert 'msg' in result
+
+ def test_main(self, mocker, ome_default_args, ome_connection_breakout_mock, ome_response_mock):
+ ome_default_args.update({"target_port": "2HB7NX2:phy-port1/1/11", "breakout_type": "1X20GE"})
+ mocker.patch("{0}{1}".format(MODULE_PATH, "ome_network_port_breakout.get_device_id"),
+ return_value=25017)
+ mocker.patch("{0}{1}".format(MODULE_PATH, "ome_network_port_breakout.get_breakout_payload"),
+ return_value={})
+ mocker.patch("{0}{1}".format(MODULE_PATH, "ome_network_port_breakout.get_port_information"),
+ return_value=("HardwareDefault", [{"Type": "1X40GE"}, {"Type": "1X20GE"}],
+ "2HB7NX2:phy-port1/1/11"))
+ mocker.patch("{0}{1}".format(MODULE_PATH, "ome_network_port_breakout.set_breakout"),
+ return_value=ome_response_mock)
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == "Port breakout configuration job submitted successfully."
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan.py
new file mode 100644
index 00000000..e7b7a05c
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan.py
@@ -0,0 +1,210 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.3.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import json
+import pytest
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_network_vlan
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_network_vlan.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_network_vlan(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeNetworkVlan(FakeAnsibleModule):
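+    """Pytest class for ome_network_vlan module."""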
+ module = ome_network_vlan
+
+ @pytest.mark.parametrize("params",
+ [{"success": True, "json_data": {"value": [{"Name": "vlan_name", "Id": 123}]}, "id": 123},
+ {"success": True, "json_data": {"value": []}, "id": 0},
+ {"success": False, "json_data": {"value": [{"Name": "vlan_name", "Id": 123}]}, "id": 0},
+ {"success": True, "json_data": {"value": [{"Name": "vlan_name1", "Id": 123}]}, "id": 0}])
+ def test_get_item_id(self, params, ome_connection_mock_for_network_vlan, ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ id, vlans = self.module.get_item_id(ome_connection_mock_for_network_vlan, "vlan_name", "uri")
+ assert id == params["id"]
+
+ @pytest.mark.parametrize("vlan_param",
+ [{"in": {"name": "vlan1", "type": 1, "vlan_maximum": 40, "vlan_minimum": 35},
+ "out": {"Name": "vlan1", "Type": 1, "VlanMaximum": 40, "VlanMinimum": 35}},
+ {"in": None, "out": None}])
+ def test_format_payload(self, vlan_param):
+ result = self.module.format_payload(vlan_param["in"])
+ assert result == vlan_param["out"]
+
+ def test_delete_vlan(self, ome_connection_mock_for_network_vlan, ome_response_mock):
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {}
+ f_module = self.get_module_mock(params={"name": "vlan1"})
+ with pytest.raises(Exception, match="Successfully deleted the VLAN.") as err:
+ self.module.delete_vlan(f_module, ome_connection_mock_for_network_vlan, 12)
+
+ @pytest.mark.parametrize("params",
+ [{"format_payload": {"VlanMaximum": None, "VlanMinimum": 35},
+ "error_msg": "The vlan_minimum, vlan_maximum and type values are required for creating"
+ " a VLAN.", "overlap": {}},
+ {"format_payload": {"VlanMaximum": 40, "VlanMinimum": 45}, "overlap": {},
+ "error_msg": "VLAN-minimum value is greater than VLAN-maximum value."},
+ {"format_payload": {"VlanMaximum": 40, "VlanMinimum": 35},
+ "overlap": {"Name": "vlan1", "Type": 1, "VlanMaximum": 40, "VlanMinimum": 35},
+ "error_msg": "Unable to create or update the VLAN because the entered range"
+ " overlaps with vlan1 with the range 35-40."},
+ {"format_payload": {"VlanMaximum": 40, "VlanMinimum": 35},
+ "error_msg": "Network type 'General Purpose (Silver)' not found.",
+ "overlap": {}},
+ {"format_payload": {"VlanMaximum": 40, "VlanMinimum": 35}, "item": 1, "overlap": {},
+ "check_mode": True, "error_msg": "Changes found to be applied."},
+ ])
+ def test_create_vlan(self, mocker, params, ome_connection_mock_for_network_vlan, ome_response_mock):
+ f_module = self.get_module_mock(params={"name": "vlan1", "vlan_maximum": 40, "vlan_minimum": 35,
+ "type": "General Purpose (Silver)"}, check_mode=params.get("check_mode", False))
+ mocker.patch(MODULE_PATH + "format_payload", return_value=(params["format_payload"]))
+ mocker.patch(MODULE_PATH + "check_overlapping_vlan_range", return_value=(params["overlap"]))
+ mocker.patch(MODULE_PATH + "get_item_id", return_value=(0, []))
+ error_message = params["error_msg"]
+ with pytest.raises(Exception) as err:
+ self.module.create_vlan(f_module, ome_connection_mock_for_network_vlan, [])
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize("params",
+ [{"format_payload": {"VlanMaximum": 40, "VlanMinimum": 35},
+ "error_msg": "Network type 'General Purpose (Silver)' not found.",
+ "overlap": {}},
+ {"format_payload": {"Name": "vlan11", "Type": 1, "VlanMaximum": 40, "VlanMinimum": 45},
+ "overlap": {}, "item": 1,
+ "error_msg": "VLAN-minimum value is greater than VLAN-maximum value."},
+ {"format_payload": {"VlanMaximum": 40, "VlanMinimum": 35}, "item": 1,
+ "overlap": {"Name": "vlan1", "Type": 1, "VlanMaximum": 40, "VlanMinimum": 35},
+ "error_msg": "Unable to create or update the VLAN because the entered range"
+ " overlaps with vlan1 with the range 35-40."},
+ {"format_payload": {"Name": "vlan11", "Type": 1, "VlanMaximum": 45, "VlanMinimum": 40},
+ "item": 1, "overlap": {},
+ "check_mode": True, "error_msg": "Changes found to be applied."},
+ ])
+ def test_modify_vlan(self, mocker, params, ome_connection_mock_for_network_vlan, ome_response_mock):
+ f_module = self.get_module_mock(params={"name": "vlan1", "vlan_maximum": 40, "vlan_minimum": 45,
+ "type": "General Purpose (Silver)"},
+ check_mode=params.get("check_mode", False))
+ mocker.patch(MODULE_PATH + "format_payload", return_value=(params["format_payload"]))
+ mocker.patch(MODULE_PATH + "check_overlapping_vlan_range", return_value=(params["overlap"]))
+ mocker.patch(MODULE_PATH + "get_item_id", return_value=(params.get("item", 0), []))
+ error_message = params["error_msg"]
+ with pytest.raises(Exception) as err:
+ self.module.modify_vlan(f_module, ome_connection_mock_for_network_vlan, 123,
+ [{"Id": 13, "Name": "vlan11", "Type": 1, "VlanMaximum": 140, "VlanMinimum": 135},
+ {"Id": 123, "Name": "vlan1", "Type": 1, "VlanMaximum": 40, "VlanMinimum": 35,
+ 'Description': None}])
+ assert err.value.args[0] == error_message
+
+ def test_main_case_create_success(self, mocker, ome_default_args, ome_connection_mock_for_network_vlan, ome_response_mock):
+ mocker.patch(MODULE_PATH + "check_existing_vlan", return_value=(0, [{"VlanMaximum": 40, "VlanMinimum": 35}]))
+ mocker.patch(MODULE_PATH + "get_item_id", return_value=(1, []))
+ mocker.patch(MODULE_PATH + "check_overlapping_vlan_range", return_value=None)
+ ome_default_args.update(
+ {"name": "vlan1", "state": "present", "type": "General Purpose (Bronze)",
+ "vlan_maximum": 40, "vlan_minimum": 35})
+ ome_response_mock.json_data = {"Id": 14227, "Name": "vlan1", "Type": 1,
+ "VlanMaximum": 40, "VlanMinimum": 35}
+ result = self._run_module(ome_default_args)
+ # assert result['changed'] is True
+ assert "msg" in result
+ assert result['vlan_status'] == {"Id": 14227, "Name": "vlan1", "Type": 1,
+ "VlanMaximum": 40, "VlanMinimum": 35}
+ assert result["msg"] == "Successfully created the VLAN."
+
+ def test_main_case_modify_success(self, mocker, ome_default_args, ome_connection_mock_for_network_vlan, ome_response_mock):
+ mocker.patch(MODULE_PATH + "check_existing_vlan", return_value=(1, [{"Id": 1, "VlanMaximum": 40, "VlanMinimum": 35}]))
+ mocker.patch(MODULE_PATH + "get_item_id", return_value=(2, []))
+ mocker.patch(MODULE_PATH + "check_overlapping_vlan_range", return_value=None)
+ ome_default_args.update(
+ {"name": "vlan1", "state": "present", "type": "General Purpose (Bronze)",
+ "vlan_maximum": 40, "vlan_minimum": 35})
+ ome_response_mock.json_data = {"Id": 14227, "Name": "vlan1", "Type": 2, "VlanMaximum": 40, "VlanMinimum": 35}
+ result = self._run_module(ome_default_args)
+ # assert result['changed'] is True
+ assert "msg" in result
+ assert result['vlan_status'] == {"Id": 14227, "Name": "vlan1", "Type": 2, "VlanMaximum": 40, "VlanMinimum": 35}
+ assert result["msg"] == "Successfully updated the VLAN."
+
+ @pytest.mark.parametrize("params", [
+ {"fail_json": False, "json_data": {"JobId": 1234},
+ "check_existing_vlan": (1, []), "check_mode": True,
+ "mparams": {"state": "absent", "name": "v1"},
+ 'message': "Changes found to be applied.", "success": True
+ },
+ {"fail_json": False, "json_data": {"JobId": 1234},
+ "check_existing_vlan": (None, []), "check_mode": True,
+ "mparams": {"state": "absent", "name": "v1"},
+ 'message': "No changes found to be applied to the VLAN configuration.", "success": True
+ },
+ {"fail_json": False, "json_data": {"JobId": 1234},
+ "check_existing_vlan": (None, []), "check_mode": False,
+ "mparams": {"state": "absent", "name": "v1"},
+ 'message': "VLAN v1 does not exist.", "success": True
+ }
+ ])
+ def test_main(self, params, ome_connection_mock_for_network_vlan, ome_default_args, ome_response_mock, mocker):
+ mocker.patch(MODULE_PATH + 'check_existing_vlan', return_value=params.get("check_existing_vlan"))
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params.get("json_data")
+ ome_default_args.update(params.get('mparams'))
+ if params.get("fail_json", False):
+ result = self._run_module_with_fail_json(ome_default_args)
+ else:
+ result = self._run_module(ome_default_args, check_mode=params.get("check_mode", False))
+ assert result["msg"] == params['message']
+
+ @pytest.mark.parametrize("params",
+ [{"payload": {"VlanMaximum": 40, "VlanMinimum": 35},
+ "vlans": [{"VlanMaximum": 40, "VlanMinimum": 35}],
+ "current_vlan": {"VlanMaximum": 40, "VlanMinimum": 35}}])
+ def test_check_overlapping_vlan_range(self, params, ome_connection_mock_for_network_vlan, ome_response_mock):
+ result = self.module.check_overlapping_vlan_range(params["payload"], params["vlans"])
+ assert result == params["current_vlan"]
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_application_network_vlan_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_network_vlan,
+ ome_response_mock):
+ ome_default_args.update({"name": "vlan1", "state": "present", "type": "General Purpose (Bronze)",
+ "vlan_maximum": 40, "vlan_minimum": 35})
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'check_existing_vlan', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'check_existing_vlan', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'check_existing_vlan',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'vlan_status' not in result
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan_info.py
new file mode 100644
index 00000000..084fcd85
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan_info.py
@@ -0,0 +1,195 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 2.1.3
+# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_network_vlan_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
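+# Sample response from the NetworkConfigurationService Networks endpoint used by the tests below.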
+response = {
+ '@odata.context': '/api/$metadata#Collection(NetworkConfigurationService.Network)',
+ '@odata.count': 1,
+ 'value': [
+ {
+ '@odata.type': '#NetworkConfigurationService.Network',
+ '@odata.id': '/api/NetworkConfigurationService/Networks(20057)',
+ 'Id': 20057,
+ 'Name': 'Logical Network - 1',
+ 'Description': 'Description of Logical Network - 1',
+ 'VlanMaximum': 111,
+ 'VlanMinimum': 111,
+ "Type": 1,
+ 'CreatedBy': 'admin',
+ 'CreationTime': '2020-09-02 18:48:42.129',
+ 'UpdatedBy': None,
+ 'UpdatedTime': '2020-09-02 18:48:42.129',
+ 'InternalRefNWUUId': '42b9903d-93f8-4184-adcf-0772e4492f71'
+ }
+ ]
+}
+
+network_type_qos_type_dict_response = {1: {'Id': 1, 'Name': 'General Purpose (Bronze)',
+ 'Description':
+ 'This is the network for general purpose traffic. QOS Priority : Bronze.',
+ 'VendorCode': 'GeneralPurpose', 'NetworkTrafficType': 'Ethernet',
+ 'QosType': {'Id': 4, 'Name': 'Bronze'}}}
+
+network_type_dict_response = {1: {'Id': 1, 'Name': 'General Purpose (Bronze)',
+ 'Description':
+ 'This is the network for general purpose traffic. QOS Priority : Bronze.',
+ 'VendorCode': 'GeneralPurpose', 'NetworkTrafficType': 'Ethernet',
+ 'QosType': 4}}
+
+qos_type_dict_response = {4: {'Id': 4, 'Name': 'Bronze'}}
+
+type_dict_ome_response = {'@odata.context': '/api/$metadata#Collection(NetworkConfigurationService.Network)',
+ '@odata.count': 1,
+ 'value': [
+ {'@odata.type': '#NetworkConfigurationService.NetworkType',
+ '@odata.id': '/api/NetworkConfigurationService/NetworkTypes(1)',
+ 'Id': 1,
+ 'Name': 'General Purpose (Bronze)',
+ 'Description': 'This is the network for general purpose traffic. QOS Priority : Bronze.',
+ 'VendorCode': 'GeneralPurpose', 'NetworkTrafficType': 'Ethernet',
+ 'QosType': 4}]}
+
+
+class TestOmeNetworkVlanInfo(FakeAnsibleModule):
+ """Pytest class for ome_network_vlan_info module."""
+ module = ome_network_vlan_info
+
+ @pytest.fixture
+ def ome_connection_network_vlan_info_mock(self, mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(
+ 'ansible_collections.dellemc.openmanage.plugins.modules.ome_network_vlan_info.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+ def test_get_network_vlan_info_success_case(self, mocker, ome_default_args, ome_connection_network_vlan_info_mock,
+ ome_response_mock):
+ ome_response_mock.json_data = response
+ ome_response_mock.status_code = 200
+ mocker.patch(
+ MODULE_PATH + 'ome_network_vlan_info.get_network_type_and_qos_type_information',
+            return_value=network_type_qos_type_dict_response)
+ result = self._run_module(ome_default_args)
+ print(result)
+ assert 'network_vlan_info' in result
+ assert result['msg'] == "Successfully retrieved the network VLAN information."
+
+ def test_get_network_vlan_info_by_id_success_case(self, mocker, ome_default_args,
+ ome_connection_network_vlan_info_mock, ome_response_mock):
+ ome_default_args.update({"id": 20057})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = response
+ ome_response_mock.status_code = 200
+ mocker.patch(
+ MODULE_PATH + 'ome_network_vlan_info.get_network_type_and_qos_type_information',
+            return_value=network_type_qos_type_dict_response)
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'network_vlan_info' in result
+ assert result['msg'] == "Successfully retrieved the network VLAN information."
+
+ def test_get_network_vlan_info_by_name_success_case(self, mocker, ome_default_args,
+ ome_connection_network_vlan_info_mock, ome_response_mock):
+ ome_default_args.update({"name": "Logical Network - 1"})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = response
+ ome_response_mock.status_code = 200
+ mocker.patch(
+ MODULE_PATH + 'ome_network_vlan_info.get_network_type_and_qos_type_information',
+            return_value=network_type_qos_type_dict_response)
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'network_vlan_info' in result
+ assert result['msg'] == "Successfully retrieved the network VLAN information."
+
+ def test_get_network_type_and_qos_type_information(self, mocker, ome_connection_network_vlan_info_mock):
+ mocker.patch(MODULE_PATH + 'ome_network_vlan_info.get_type_information',
+ side_effect=[network_type_dict_response, qos_type_dict_response])
+ result = self.module.get_network_type_and_qos_type_information(ome_connection_network_vlan_info_mock)
+ assert result[1]['QosType']['Id'] == 4
+
+ def test_get_type_information(self, mocker, ome_default_args,
+ ome_connection_network_vlan_info_mock, ome_response_mock):
+ ome_response_mock.success = True
+        ome_response_mock.json_data = type_dict_ome_response
+ ome_response_mock.status_code = 200
+ result = self.module.get_type_information(ome_connection_network_vlan_info_mock, '')
+ assert result[1]['QosType'] == 4
+
+ def test_network_vlan_info_failure_case(self, ome_default_args, ome_connection_network_vlan_info_mock,
+ ome_response_mock):
+ ome_response_mock.status_code = 500
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == "Failed to retrieve the network VLAN information."
+
+ def test_network_vlan_info_name_failure_case(self, ome_default_args, ome_connection_network_vlan_info_mock,
+ ome_response_mock):
+ ome_default_args.update({"name": "non-existing vlan"})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = response
+ ome_response_mock.status_code = 200
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'network_vlan_info' not in result
+ assert result['msg'] == "Provided network VLAN with name - 'non-existing vlan' does not exist."
+
+ @pytest.mark.parametrize("exc_type", [URLError, HTTPError, SSLValidationError, ConnectionError,
+ TypeError, ValueError])
+    def test_network_vlan_info_main_exception_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_network_vlan_info_mock, ome_response_mock):
+ ome_response_mock.status_code = 404
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type == URLError:
+ ome_connection_network_vlan_info_mock.invoke_request.side_effect = exc_type(
+ "ansible.module_utils.urls.open_url error")
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type == HTTPError:
+ ome_connection_network_vlan_info_mock.invoke_request.side_effect = exc_type(
+ 'http://testhost.com', 400, '<400 bad request>', {"accept-type": "application/json"},
+ StringIO(json_str))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
+ assert 'error_info' in result
+
+ ome_connection_network_vlan_info_mock.invoke_request.side_effect = exc_type(
+ 'http://testhost.com', 404, '<404 not found>', {"accept-type": "application/json"}, StringIO(json_str))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
+ elif exc_type != SSLValidationError:
+ mocker.patch(MODULE_PATH + 'ome_network_vlan_info.get_network_type_and_qos_type_information',
+ side_effect=exc_type('test'))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
+ else:
+ mocker.patch(MODULE_PATH + 'ome_network_vlan_info.get_network_type_and_qos_type_information',
+ side_effect=exc_type('http://testhost.com', 404, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_powerstate.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_powerstate.py
new file mode 100644
index 00000000..707e495c
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_powerstate.py
@@ -0,0 +1,436 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 3.3.0
+# Copyright (C) 2020-2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from io import StringIO
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_powerstate
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def ome_connection_powerstate_mock(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_powerstate.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmePowerstate(FakeAnsibleModule):
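+    """Pytest class for ome_powerstate module."""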
+ module = ome_powerstate
+
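+    # Sample firmware update job payload used to exercise spawn_update_job.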
+ payload = {
+ "Builtin": False,
+ "CreatedBy": "admin",
+ "Editable": True,
+ "EndTime": None,
+ "Id": 29099,
+ "JobDescription": "Firmware Update Task",
+ "JobName": "Firmware Update Task",
+ "JobStatus": {
+ "Id": 2080,
+ "Name": "New"
+ },
+ "JobType": {
+ "Id": 5,
+ "Internal": False,
+ "Name": "Update_Task"
+ },
+ "LastRun": None,
+ "LastRunStatus": {
+ "Id": 2200,
+ "Name": "NotRun"
+ },
+ "NextRun": None,
+ "Params": [
+ {
+ "JobId": 29099,
+ "Key": "operationName",
+ "Value": "INSTALL_FIRMWARE"
+ },
+ {
+ "JobId": 29099,
+ "Key": "complianceUpdate",
+ "Value": "false"
+ },
+ {
+ "JobId": 29099,
+ "Key": "stagingValue",
+ "Value": "false"
+ },
+ {
+ "JobId": 29099,
+ "Key": "signVerify",
+ "Value": "true"
+ }
+ ],
+ "Schedule": "startnow",
+ "StartTime": None,
+ "State": "Enabled",
+ "Targets": [
+ {
+ "Data": "DCIM:INSTALLED#741__BIOS.Setup.1-1=1577776981156",
+ "Id": 28628,
+ "JobId": 29099,
+ "TargetType": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }
+ ],
+ "UpdatedBy": None,
+ "Visible": True
+ }
+
+ @pytest.mark.parametrize("param", [payload])
+ def test_spawn_update_job_case(self, param, ome_response_mock,
+ ome_connection_powerstate_mock):
+ ome_response_mock.status_code = 201
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {
+ "Builtin": False,
+ "CreatedBy": "admin",
+ "Editable": True,
+ "EndTime": None,
+ "Id": 29099,
+ "JobDescription": "Firmware Update Task",
+ "JobName": "Firmware Update Task",
+ "JobStatus": {
+ "Id": 2080,
+ "Name": "New"
+ },
+ "JobType": {
+ "Id": 5,
+ "Internal": False,
+ "Name": "Update_Task"
+ },
+ "LastRun": None,
+ "LastRunStatus": {
+ "Id": 2200,
+ "Name": "NotRun"
+ },
+ "NextRun": None,
+ "Params": [
+ {
+ "JobId": 29099,
+ "Key": "operationName",
+ "Value": "INSTALL_FIRMWARE"
+ },
+ {
+ "JobId": 29099,
+ "Key": "complianceUpdate",
+ "Value": "false"
+ },
+ {
+ "JobId": 29099,
+ "Key": "stagingValue",
+ "Value": "false"
+ },
+ {
+ "JobId": 29099,
+ "Key": "signVerify",
+ "Value": "true"
+ }
+ ],
+
+ "Schedule": "startnow",
+ "StartTime": None,
+ "State": "Enabled",
+ "Targets": [{
+ "Data": "DCIM:INSTALLED#741__BIOS.Setup.1-1=1577776981156",
+ "Id": 28628,
+ "JobId": 29099,
+ "TargetType": {
+ "Id": 1000,
+ "Name": "DEVICE"
+ }
+ }],
+ "UpdatedBy": None,
+ "Visible": True
+ }
+ data = self.module.spawn_update_job(ome_connection_powerstate_mock, param)
+ assert data == param
+
+ def test_build_power_state_payload_success_case(self, ome_connection_powerstate_mock):
+
+ payload = self.module.build_power_state_payload(Constants.device_id1, "off", 2000)
+ assert payload == {
+ 'Id': 0,
+ 'JobDescription': 'DeviceAction_Task',
+ 'JobName': 'DeviceAction_Task_PowerState',
+ 'JobType': {
+ 'Id': 3,
+ 'Name': 'DeviceAction_Task'
+ },
+ 'Params': [
+ {
+ 'Key': 'operationName',
+ 'Value': 'POWER_CONTROL'
+ },
+ {
+ 'Key': 'powerState',
+ 'Value': '2000'
+ }
+ ],
+ 'Schedule': 'startnow',
+ 'State': 'Enabled',
+ 'Targets': [
+ {
+ 'Data': '',
+ 'Id': 1234,
+ 'TargetType': {
+ 'Id': 'off',
+ 'Name': 'DEVICE'
+ }
+ }
+ ]
+ }
+
+ def test_get_device_state_success_case01(self, ome_connection_powerstate_mock, ome_response_mock):
+ json_data = {
+ "report_list": [{"Id": Constants.device_id1, "PowerState": "on", "Type": 1000}]}
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ f_module = self.get_module_mock()
+ data = self.module.get_device_state(f_module, json_data, Constants.device_id1)
+ assert data == ("on", 1000)
+
+ def test_get_device_state_fail_case01(self, ome_connection_powerstate_mock, ome_response_mock):
+ json_data = {
+ "report_list": [{"Id": Constants.device_id1, "PowerState": "on", "Type": 4000}]}
+ ome_response_mock.status_code = 500
+ ome_response_mock.success = False
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_state(f_module, json_data, Constants.device_id1)
+ assert exc.value.args[0] == "Unable to complete the operation because power" \
+ " state supports device type 1000 and 2000."
+
+ def test_get_device_state_fail_case02(self, ome_connection_powerstate_mock, ome_response_mock):
+ json_data = {
+ "report_list": [{"Id": 1224, "power_state": "on", "Type": 1000}]}
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_state(f_module, json_data, Constants.device_id1)
+ assert exc.value.args[0] == "Unable to complete the operation because the entered target" \
+ " device id '{0}' is invalid.".format(1234)
+
+ def test_main_powerstate_success_case01(self, ome_default_args, mocker, ome_connection_powerstate_mock,
+ ome_response_mock):
+ mocker.patch(
+ MODULE_PATH + 'ome_powerstate.get_device_resource',
+ return_value={"Repository": "payload"})
+ ome_default_args.update({"device_id": "11111", "power_state": "off"})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [{"device_id": "11111", "power_state": "off"}]}
+ ome_response_mock.status_code = 200
+ data = self._run_module(ome_default_args)
+ assert data['changed'] is True
+ assert data['msg'] == "Power State operation job submitted successfully."
+
+ def test_main_powerstate_success_case02(self, ome_default_args, mocker, ome_connection_powerstate_mock,
+ ome_response_mock):
+ mocker.patch(MODULE_PATH + 'ome_powerstate.get_device_resource',
+ return_value={"Repository": "payload"})
+ ome_default_args.update({"device_service_tag": "KLBR111", "power_state": "on"})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [{"device_id": "11111", "power_state": "on"}]}
+ ome_response_mock.status_code = 200
+ data = self._run_module(ome_default_args)
+ assert data['changed'] is True
+ assert data['msg'] == "Power State operation job submitted successfully."
+
+ def test_main_powerstate_failure_case(self, ome_default_args, mocker, ome_connection_powerstate_mock,
+ ome_response_mock):
+ mocker.patch(MODULE_PATH + 'ome_powerstate.get_device_resource',
+ return_value={"Repository": "payload"})
+ mocker.patch(MODULE_PATH + 'ome_powerstate.spawn_update_job',
+ return_value="payload")
+ ome_default_args.update({"device_service_tag": None, "power_state": "on"})
+ ome_response_mock.json_data = {"value": [{"device_service_tag": None, "power_state": "on"}]}
+ ome_response_mock.status_code = 500
+ data = self._run_module_with_fail_json(ome_default_args)
+ assert data['msg'] == "device_id and device_service_tag attributes should not be None."
+
+ def test_get_device_resource_success_case01(self, mocker, ome_default_args, ome_connection_powerstate_mock,
+ ome_response_mock):
+ ome_default_args.update({"device_id": Constants.service_tag1, "power_state": "on", "Type": 1000,
+ "device_service_tag": Constants.service_tag1})
+ mocker.patch(MODULE_PATH + 'ome_powerstate.get_device_state',
+ return_value=('on', 1000))
+ mocker.patch(MODULE_PATH + 'ome_powerstate.build_power_state_payload',
+ return_value={'Id': 0, 'JobDescription': 'DeviceAction_Task',
+ 'JobName': 'DeviceAction_Task_PowerState',
+ 'JobType': {'Id': 3, 'Name': 'DeviceAction_Task'},
+ 'Params': [{'Key': 'operationName', 'Value': 'POWER_CONTROL'},
+ {'Key': 'powerState', 'Value': '2000'}],
+ 'Schedule': 'startnow',
+ 'State': 'Enabled',
+ 'Targets': [{'Data': '',
+ 'Id': 1234,
+ 'TargetType': {'Id': 'off',
+ 'Name': 'DEVICE'}}]})
+ ome_connection_powerstate_mock.get_all_report_details.return_value = {
+ 'report_list': [{"DeviceServiceTag": Constants.service_tag1, "Id": Constants.service_tag1,
+ "power_state": "on"}]}
+ f_module = self.get_module_mock(params=ome_default_args)
+ f_module.check_mode = False
+ data = self.module.get_device_resource(f_module, ome_connection_powerstate_mock)
+ assert data == {'Id': 0, 'JobDescription': 'DeviceAction_Task', 'JobName': 'DeviceAction_Task_PowerState',
+ 'JobType': {'Id': 3, 'Name': 'DeviceAction_Task'},
+ 'Params': [{'Key': 'operationName', 'Value': 'POWER_CONTROL'},
+ {'Key': 'powerState', 'Value': '2000'}],
+ 'Schedule': 'startnow',
+ 'State': 'Enabled',
+ 'Targets': [{'Data': '',
+ 'Id': 1234,
+ 'TargetType': {'Id': 'off',
+ 'Name': 'DEVICE'}}]}
+
+ def test_get_device_resource_success_case02(self, mocker, ome_default_args, ome_connection_powerstate_mock,
+ ome_response_mock):
+ ome_default_args.update({"device_id": Constants.service_tag1, "power_state": "on", "Type": 1000,
+ "device_service_tag": Constants.service_tag1})
+ mocker.patch(MODULE_PATH + 'ome_powerstate.get_device_state',
+ return_value=('on', 1000))
+ mocker.patch(MODULE_PATH + 'ome_powerstate.build_power_state_payload',
+ return_value={'Id': 0, 'JobDescription': 'DeviceAction_Task',
+ 'JobName': 'DeviceAction_Task_PowerState',
+ 'JobType': {'Id': 3, 'Name': 'DeviceAction_Task'},
+ 'Params': [{'Key': 'operationName', 'Value': 'POWER_CONTROL'},
+ {'Key': 'powerState', 'Value': '2000'}],
+ 'Schedule': 'startnow',
+ 'State': 'Enabled',
+ 'Targets': [{'Data': '',
+ 'Id': 1234,
+ 'TargetType': {'Id': 'off',
+ 'Name': 'DEVICE'}}]})
+ ome_connection_powerstate_mock.get_all_report_details.return_value = {
+ 'report_list': [{"DeviceServiceTag": None, "Id": Constants.service_tag1,
+ "power_state": "on"}]}
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=False)
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_resource(f_module, ome_connection_powerstate_mock)
+ assert exc.value.args[0] == "Unable to complete the operation because the entered target device " \
+ "service tag 'MXL1234' is invalid."
+
+ def test_get_device_resource_success_case03(self, mocker, ome_default_args, ome_connection_powerstate_mock,
+ ome_response_mock):
+ ome_default_args.update({"device_id": Constants.service_tag1, "power_state": "coldboot", "Type": 1000,
+ "device_service_tag": Constants.service_tag1})
+ mocker.patch(MODULE_PATH + 'ome_powerstate.get_device_state',
+ return_value=('off', 1000))
+ mocker.patch(MODULE_PATH + 'ome_powerstate.build_power_state_payload',
+ return_value={'Id': 0, 'JobDescription': 'DeviceAction_Task',
+ 'JobName': 'DeviceAction_Task_PowerState',
+ 'JobType': {'Id': 3, 'Name': 'DeviceAction_Task'},
+ 'Params': [{'Key': 'operationName', 'Value': 'POWER_CONTROL'},
+ {'Key': 'powerState', 'Value': '2000'}],
+ 'Schedule': 'startnow',
+ 'State': 'Enabled',
+ 'Targets': [{'Data': '',
+ 'Id': 1234,
+ 'TargetType': {'Id': 'off',
+ 'Name': 'DEVICE'}}]})
+ ome_connection_powerstate_mock.get_all_report_details.return_value = {
+ 'report_list': [{"DeviceServiceTag": Constants.service_tag1, "Id": Constants.service_tag1,
+ "power_state": "coldboot"}]}
+ ome_response_mock.success = True
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_resource(f_module, ome_connection_powerstate_mock)
+ assert exc.value.args[0] == "No changes found to commit."
+
+ def test_get_device_resource_success_case04(self, mocker, ome_default_args, ome_connection_powerstate_mock,
+ ome_response_mock):
+ ome_default_args.update({"device_id": Constants.service_tag1, "power_state": "on", "Type": 1000,
+ "device_service_tag": Constants.service_tag1})
+ mocker.patch(MODULE_PATH + 'ome_powerstate.get_device_state',
+ return_value=(2, 1000))
+ mocker.patch(MODULE_PATH + 'ome_powerstate.build_power_state_payload',
+ return_value={'Id': 0, 'JobDescription': 'DeviceAction_Task',
+ 'JobName': 'DeviceAction_Task_PowerState',
+ 'JobType': {'Id': 3, 'Name': 'DeviceAction_Task'},
+ 'Params': [{'Key': 'operationName', 'Value': 'POWER_CONTROL'},
+ {'Key': 'powerState', 'Value': '2000'}],
+ 'Schedule': 'startnow',
+ 'State': 'Enabled',
+ 'Targets': [{'Data': '',
+ 'Id': 1234,
+ 'TargetType': {'Id': 'off',
+ 'Name': 'DEVICE'}}]})
+ ome_connection_powerstate_mock.get_all_report_details.return_value = {
+ 'report_list': [
+ {"DeviceServiceTag": Constants.service_tag1,
+ "Id": Constants.service_tag1, "power_state": "on"
+ }
+ ]
+ }
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_resource(f_module, ome_connection_powerstate_mock)
+ assert exc.value.args[0] == "No changes found to commit."
+
+ def test_get_device_resource_failed_case01(self, mocker, ome_default_args, ome_connection_powerstate_mock,
+ ome_response_mock):
+ ome_default_args.update({"device_id": None, "power_state": "on", "Type": 1000,
+ "device_service_tag": "@#4"})
+ mocker.patch(MODULE_PATH + 'ome_powerstate.get_device_state',
+ return_value=('on', 1000))
+ ome_connection_powerstate_mock.get_all_report_details.return_value = {
+ 'report_list': [{"DeviceServiceTag": "@#4", "Id": None,
+ "power_state": "on"}]}
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_resource(f_module, ome_connection_powerstate_mock)
+ assert exc.value.args[0] == "Changes found to commit."
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_powerstate_main_exception_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_powerstate_mock,
+ ome_response_mock):
+ ome_default_args.update({"device_service_tag": Constants.service_tag1, "power_state": "on"})
+ ome_response_mock.json_data = {"value": [{"device_service_tag": Constants.service_tag1, "power_state": "on",
+ "Id": Constants.device_id1}]}
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(
+ MODULE_PATH + 'ome_powerstate.get_device_resource',
+ side_effect=exc_type('test'))
+ mocker.patch(
+ MODULE_PATH + 'ome_powerstate.spawn_update_job',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(
+ MODULE_PATH + 'ome_powerstate.spawn_update_job',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ mocker.patch(
+ MODULE_PATH + 'ome_powerstate.get_device_resource',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert 'power_state' not in result
+ assert 'msg' in result
+ assert result['failed'] is True
+ if exc_type == HTTPError:
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_profile.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_profile.py
new file mode 100644
index 00000000..91f7fc1b
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_profile.py
@@ -0,0 +1,547 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+from ssl import SSLError
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_profile
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_profile.'
+CHANGES_MSG = "Changes found to be applied."
+NO_CHANGES_MSG = "No changes found to be applied."
+
+
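+# Patches RestOME in the ome_profile module so the connection context manager
+# returns a mock whose invoke_request() yields the shared ome_response_mock;
+# each test drives its scenario through json_data and the success flag.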
+@pytest.fixture
+def ome_connection_mock_for_profile(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeProfile(FakeAnsibleModule):
+ module = ome_profile
+
+ @pytest.mark.parametrize("params",
+ [{"mparams": {"template_id": 123}, "success": True,
+ "json_data": {"value": [{"Name": "vlan_name", "Id": 123, "IdentityPoolId": 23}]},
+ "res": {"Name": "vlan_name", "Id": 123, "IdentityPoolId": 23}},
+ {"mparams": {"template_name": "temp1"}, "success": True,
+ "json_data": {"value": [{"Name": "temp1", "Id": 123, "IdentityPoolId": 23}]},
+ "res": {"Name": "temp1", "Id": 123, "IdentityPoolId": 23}}])
+ def test_get_template_details(self, params, ome_connection_mock_for_profile, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params["mparams"])
+ result = self.module.get_template_details(f_module, ome_connection_mock_for_profile)
+ assert result == params["res"]
+
+ @pytest.mark.parametrize("params",
+ [{"mparams": {"device_id": 123}, "success": True,
+ "json_data": {"value": [{"Name": "vlan_name", "Id": 123, "IdentityPoolId": 23}]},
+ "res": {"Name": "vlan_name", "Id": 123, "IdentityPoolId": 23}},
+ {"mparams": {"device_service_tag": "ABC1234"}, "success": True,
+ "json_data": {"value": [{"Identifier": "ABC1234", "Id": 123, "IdentityPoolId": 23}]},
+ "res": {"Identifier": "ABC1234", "Id": 123, "IdentityPoolId": 23}}])
+ def test_get_target_details(self, params, ome_connection_mock_for_profile, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params["mparams"])
+ result = self.module.get_target_details(f_module, ome_connection_mock_for_profile)
+ assert result == params["res"]
+
+ @pytest.mark.parametrize("params",
+ [{"mparams": {
+ "attributes": {
+ "Attributes": [
+ {
+ "Id": 93812,
+ "IsIgnored": False,
+ "Value": "Aisle Five"
+ },
+ {
+ "DisplayName": 'System, Server Topology, ServerTopology 1 Aisle Name',
+ "IsIgnored": False,
+ "Value": "Aisle 5"
+ }
+ ]
+ }}, "success": True,
+ "json_data": {
+ "Id": 11,
+ "Name": "ProfileViewEditAttributes",
+ "AttributeGroupNames": [],
+ "AttributeGroups": [
+ {
+ "GroupNameId": 5,
+ "DisplayName": "System",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 33016,
+ "DisplayName": "Server Operating System",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 93820,
+ "DisplayName": "ServerOS 1 Server Host Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ }
+ ]
+ },
+ {
+ "GroupNameId": 33019,
+ "DisplayName": "Server Topology",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 93812,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2248,
+ "DisplayName": "ServerTopology 1 Aisle Name",
+ "Description": None,
+ "Value": "Aisle 5",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ },
+ {
+ "AttributeId": 93811,
+ "DisplayName": "ServerTopology 1 Data Center Name",
+ "Value": "BLG 2nd Floor DS 1",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ },
+ {
+ "AttributeId": 93813,
+ "DisplayName": "ServerTopology 1 Rack Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ },
+ {
+ "AttributeId": 93814,
+ "DisplayName": "ServerTopology 1 Rack Slot",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ }
+ ]
+ }
+ ],
+ "Attributes": []
+ },
+ {
+ "GroupNameId": 9,
+ "DisplayName": "iDRAC",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 32688,
+ "DisplayName": "Active Directory",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 93523,
+ "DisplayName": "ActiveDirectory 1 Active Directory RAC Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32930,
+ "DisplayName": "NIC Information",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 93035,
+ "DisplayName": "NIC 1 DNS RAC Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ },
+ {
+ "AttributeId": 92510,
+ "DisplayName": "NIC 1 Enable VLAN",
+ "Description": None,
+ "Value": "Disabled",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ }
+ ]
+ }
+ ],
+ "Attributes": []}]},
+ "diff": 2}])
+ def test_attributes_check(self, params, ome_connection_mock_for_profile, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params["mparams"])
+ result = self.module.attributes_check(f_module, ome_connection_mock_for_profile,
+ params['mparams']['attributes'], 123)
+ assert result == params["diff"]
+
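+ # profile_operation() is expected to dispatch each 'command' value to the
+ # matching helper; the helper itself is mocked here, so only the routing is
+ # exercised.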
+ @pytest.mark.parametrize("params", [{"mparams": {"command": 'create'}, "func": "create_profile"},
+ {"mparams": {"command": 'modify'}, "func": "modify_profile"},
+ {"mparams": {"command": 'delete'}, "func": "delete_profile"},
+ {"mparams": {"command": 'assign'}, "func": "assign_profile"},
+ {"mparams": {"command": 'unassign'}, "func": "unassign_profile"},
+ {"mparams": {"command": 'migrate'}, "func": "migrate_profile"}])
+ def test_profile_operation(self, mocker, params, ome_connection_mock_for_profile, ome_response_mock):
+ mocker.patch(MODULE_PATH + params.get('func'), return_value={"Id": 12})
+ f_module = self.get_module_mock(params=params["mparams"])
+ self.module.profile_operation(f_module, ome_connection_mock_for_profile)
+
+ @pytest.mark.parametrize("params", [{"mparams": {"name": "p1"}, "success": True, "json_data": {
+ "value": [{"Id": 123, "ProfileName": "p1"}]}, "res": {"Id": 123, "ProfileName": "p1"}}])
+ def test_get_profile(self, params, ome_connection_mock_for_profile, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params["mparams"])
+ result = self.module.get_profile(ome_connection_mock_for_profile, f_module)
+ assert result == params["res"]
+
+ @pytest.mark.parametrize("params", [{"mparams": {
+ "command": "create", "template_name": "t1", "name_prefix": "profile",
+ "number_of_profiles": 2, "description": "Created 1",
+ "boot_to_network_iso": {
+ "boot_to_network": True,
+ "share_type": "CIFS",
+ "share_ip": "100.200.300",
+ "share_user": "shareuser",
+ "share_pwd": "sharepwd",
+ "workgroup": "workgroup",
+ "iso_path": "pathofiso.iso",
+ "iso_timeout": 8
+ }
+ },
+ "success": True,
+ "json_data": [1, 2],
+ "res": "Successfully created 2 profile(s)."},
+ {
+ "mparams":
+ {
+ "command": "create",
+ "template_name": "t1",
+ "name_prefix": "profile",
+ "number_of_profiles": 1
+ },
+ "success": True,
+ "json_data": [1],
+ "res": "Successfully created 1 profile(s)."},
+ {
+ "mparams":
+ {
+ "command": "create",
+ "template_name": "t1",
+ "name_prefix": "profile",
+ "number_of_profiles": 1
+ },
+ "success": True, "check_mode": True, "json_data": [1], "res": CHANGES_MSG}
+ ])
+ def test_create_profile(self, mocker, params, ome_connection_mock_for_profile, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 12})
+ f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False))
+ error_message = params["res"]
+ with pytest.raises(Exception) as err:
+ self.module.create_profile(f_module, ome_connection_mock_for_profile)
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize("params", [
+ {"mparams": {"command": "modify", "name": "profile"},
+ "success": True,
+ "prof": {}, "json_data": 0,
+ "res": "Profile with the name 'profile' not found."},
+ {"mparams": {"command": "modify", "name": "profile", "new_name": "modified profile",
+ "description": "new description",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "iso_path": "path/to/my_iso.iso",
+ "iso_timeout": 8},
+ "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1",
+ "IsIgnored": True}]}}, "success": True,
+ "prof": {"Id": 1234,
+ "ProfileName": "jrofile 00002",
+ "ProfileDescription": "from source template t1",
+ "NetworkBootToIso": {"BootToNetwork": True, "ShareType": "NFS", "IsoPath": "abcd.iso",
+ "ShareDetail": {"IpAddress": "XX.XX.XX.XX", "ShareName": "XX.XX.XX.XX", },
+ "IsoTimeout": 4},
+ "ProfileState": 0, },
+ "json_data": 0,
+ "res": "Successfully modified the profile."},
+ {"mparams": {"command": "modify", "name": "myprofile", "new_name": "myprofile"},
+ "success": True,
+ "prof": {"Id": 1234, "ProfileName": "myprofile", "ProfileDescription": "my description"},
+ "json_data": 0, "res": "No changes found to be applied."},
+ {"mparams": {"command": "modify", "name": "profile", "new_name": "modified profile",
+ "description": "new description",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "iso_path": "path/to/my_iso.iso", "iso_timeout": 8},
+ "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1",
+ "IsIgnored": True}]}}, "success": True,
+ "prof": {"Id": 1234, "ProfileName": "jrofile 00002",
+ "ProfileDescription": "from source template t1",
+ "NetworkBootToIso": {
+ "BootToNetwork": True, "ShareType": "NFS", "IsoPath": "abcd.iso",
+ "ShareDetail": {"IpAddress": "XX.XX.XX.XX", "ShareName": "XX.XX.XX.XX"}, "IsoTimeout": 4},
+ "ProfileState": 0, },
+ "json_data": 0, "attributes_check": 2, "check_mode": True, "res": CHANGES_MSG}
+ ])
+ def test_modify_profile(self, mocker, params, ome_connection_mock_for_profile, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + 'get_profile', return_value=params.get('prof'))
+ mocker.patch(MODULE_PATH + 'attributes_check', return_value=params.get('attributes_check', 0))
+ f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False))
+ error_message = params["res"]
+ with pytest.raises(Exception) as err:
+ self.module.modify_profile(f_module, ome_connection_mock_for_profile)
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize("params", [
+ {"mparams": {"command": "delete", "name": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 4}, "json_data": 0,
+ "res": "Profile has to be in an unassigned state for it to be deleted."},
+ {"mparams": {"command": "delete", "name": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 0}, "json_data": 0,
+ "res": "Successfully deleted the profile."},
+ {"mparams": {"command": "delete", "name": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 0}, "json_data": 0, "check_mode": True,
+ "res": CHANGES_MSG},
+ {"mparams": {"command": "delete", "name": "profile"}, "success": True,
+ "prof": {}, "json_data": 0,
+ "res": "Profile with the name 'profile' not found."},
+ {"mparams": {"command": "delete", "filters": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 0}, "json_data": 0,
+ "res": "Successfully completed the delete operation."},
+ {"mparams": {"command": "delete", "filters": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 0}, "json_data": 0, "check_mode": True,
+ "res": CHANGES_MSG},
+ ])
+ def test_delete_profile(self, mocker, params, ome_connection_mock_for_profile, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + 'get_profile', return_value=params.get('prof'))
+ f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False))
+ error_message = params["res"]
+ with pytest.raises(Exception) as err:
+ self.module.delete_profile(f_module, ome_connection_mock_for_profile)
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize("params", [
+ {"mparams": {"command": "assign", "name": "profile"}, "success": True,
+ "prof": {"Id": 123, "ProfileState": 1, "TargetName": "ABC1234"}, "json_data": 0,
+ "res": "The profile is assigned to a different target. Unassign the profile and then proceed with assigning the"
+ " profile to the target."},
+ {"mparams": {"command": "assign", "name": "profile"}, "success": True, "prof": {},
+ "json_data": 0, "res": "Profile with the name 'profile' not found."},
+ {"mparams": {"command": "assign", "name": "profile", "device_id": 234}, "success": True,
+ "prof": {"Id": 123, "ProfileState": 0}, "target": {"Id": 234, "Name": "mytarget"},
+ "json_data": [234, 123],
+ "res": "The target device is invalid for the given profile."},
+ {"mparams": {"command": "assign", "name": "profile", "device_id": 234,
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "iso_path": "path/to/my_iso.iso",
+ "iso_timeout": 8},
+ "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
+ "success": True,
+ "prof": {"Id": 123, "ProfileState": 0}, "target": {"Id": 234, "Name": "mytarget"}, "json_data": [23, 123],
+ "res": "Successfully applied the assign operation."},
+ {"mparams": {"command": "assign", "name": "profile", "device_service_tag": "ABCDEFG",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "iso_path": "path/to/my_iso.iso",
+ "iso_timeout": 8},
+ "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
+ "success": True, "prof": {"Id": 123, "ProfileState": 0}, "target": {"Id": 234, "Name": "mytarget"},
+ "json_data": [23, 123], "res": "Successfully applied the assign operation."},
+ {"mparams": {"command": "assign", "name": "profile", "device_id": 234,
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "iso_path": "path/to/my_iso.iso",
+ "iso_timeout": 8},
+ "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
+ "success": True,
+ "prof": {"Id": 123, "ProfileState": 4, "TargetId": 234}, "target": {"Id": 234, "Name": "mytarget"},
+ "json_data": [23, 123],
+ "res": "The profile is assigned to the target 234."},
+ {"mparams": {"command": "assign", "name": "profile", "device_id": 234,
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "iso_path": "path/to/my_iso.iso",
+ "iso_timeout": 8},
+ "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
+ "success": True,
+ "prof": {"Id": 123, "ProfileState": 4, "TargetId": 235}, "target": {"Id": 234, "Name": "mytarget"},
+ "json_data": [23, 123],
+ "res": "The profile is assigned to a different target. Use the migrate command or unassign the profile and "
+ "then proceed with assigning the profile to the target."},
+ {"mparams": {"command": "assign", "name": "profile", "device_service_tag": "STG1234",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "iso_path": "path/to/my_iso.iso",
+ "iso_timeout": 8},
+ "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
+ "success": True,
+ "prof": {"Id": 123, "ProfileState": 1, "TargetId": 235, "TargetName": "STG1234"}, "target": "Target invalid.",
+ "json_data": [23, 123],
+ "res": "The profile is assigned to the target STG1234."},
+ {"mparams": {"command": "assign", "name": "profile", "device_id": 123,
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "iso_path": "path/to/my_iso.iso",
+ "iso_timeout": 8},
+ "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
+ "success": True,
+ "prof": {"Id": 123, "ProfileState": 1, "TargetId": 235, "TargetName": "STG1234"}, "target": "Target invalid.",
+ "json_data": [23, 123],
+ "res": "Target invalid."},
+ {"mparams": {"command": "assign", "name": "profile", "device_id": 234,
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "iso_path": "path/to/my_iso.iso",
+ "iso_timeout": 8},
+ "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
+ "success": True, "check_mode": True,
+ "prof": {"Id": 123, "ProfileState": 0}, "target": {"Id": 234, "Name": "mytarget"}, "json_data": [23, 123],
+ "res": CHANGES_MSG},
+ {"mparams": {"command": "assign", "name": "profile", "device_id": 234,
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "iso_path": "path/to/my_iso.iso",
+ "iso_timeout": 8},
+ "attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
+ "success": True,
+ "prof": {"Id": 123, "ProfileState": 0, "DeploymentTaskId": 12}, "target": {"Id": 234, "Name": "mytarget"},
+ "json_data": [23, 123],
+ "res": "Successfully triggered the job for the assign operation."},
+ ])
+ def test_assign_profile(self, mocker, params, ome_connection_mock_for_profile, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + 'get_profile', return_value=params.get('prof'))
+ mocker.patch(MODULE_PATH + 'get_target_details', return_value=params.get('target'))
+ f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False))
+ error_message = params["res"]
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
+ with pytest.raises(Exception) as err:
+ self.module.assign_profile(f_module, ome_connection_mock_for_profile)
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize("params", [
+ {"mparams": {"command": "unassign", "name": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 0},
+ "json_data": 0, "res": "Profile is in an unassigned state."},
+ {"mparams": {"command": "unassign", "name": "profile"}, "success": True,
+ "prof": {}, "json_data": 0,
+ "res": "Profile with the name 'profile' not found."},
+ {"mparams": {"command": "unassign", "filters": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 4},
+ "json_data": 0, "res": "Successfully applied the unassign operation. No job was triggered."},
+ {"mparams": {"command": "unassign", "filters": "profile"}, "success": True,
+ "json_data": 0, "prof": {"Id": 12, "ProfileState": 1},
+ "res": "Successfully applied the unassign operation. No job was triggered."},
+ {"mparams": {"command": "unassign", "name": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 4, "DeploymentTaskId": 123},
+ "json_data": {"LastRunStatus": {"Name": "Running"}},
+ "res": "Profile deployment task is in progress. Wait for the job to finish."},
+ {"mparams": {"command": "unassign", "name": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 4, "DeploymentTaskId": 123},
+ "json_data": {"LastRunStatus": {"Name": "Starting"}},
+ "res": "Successfully triggered a job for the unassign operation."},
+ {"mparams": {"command": "unassign", "name": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 4, "DeploymentTaskId": 123},
+ "json_data": {"LastRunStatus": {"Name": "Starting"}}, "check_mode": True,
+ "res": CHANGES_MSG}
+ ])
+ def test_unassign_profile(self, mocker, params, ome_connection_mock_for_profile, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + 'get_profile', return_value=params.get('prof'))
+ f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False))
+ error_message = params["res"]
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
+ with pytest.raises(Exception) as err:
+ self.module.unassign_profile(f_module, ome_connection_mock_for_profile)
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize("params", [
+ {"mparams": {"command": "migrate", "name": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 4, "TargetId": 14, "DeploymentTaskId": 123},
+ "target": {"Id": 12},
+ "json_data": [1, 2, 3], "res": "Successfully triggered the job for the migrate operation."},
+ {"mparams": {"command": "migrate", "name": "profile"}, "success": True,
+ "prof": {},
+ "target": {"Id": 12, "TargetId": 14}, "json_data": 0,
+ "res": "Profile with the name 'profile' not found."},
+ {"mparams": {"command": "migrate", "name": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 0, "TargetId": 14},
+ "target": {"Id": 13, "TargetId": 14}, "json_data": [1, 2, 3],
+ "res": "Profile needs to be in a deployed state for a migrate operation."},
+ {"mparams": {"command": "migrate", "name": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 4, "TargetId": 12}, "target": {"Id": 12}, "json_data": 0,
+ "res": "No changes found to be applied."},
+ {"mparams": {"command": "migrate", "name": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 4, "TargetId": 14, "DeploymentTaskId": 123},
+ "target": "Target invalid.",
+ "json_data": [1, 2, 3], "res": "Target invalid."},
+ {"mparams": {"command": "migrate", "name": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 4, "TargetId": 14, "DeploymentTaskId": 123},
+ "target": {"Id": 12},
+ "json_data": [12, 21, 13], "res": "The target device is invalid for the given profile."},
+ {"mparams": {"command": "migrate", "name": "profile"}, "success": True,
+ "prof": {"Id": 12, "ProfileState": 4, "TargetId": 14, "DeploymentTaskId": 123},
+ "target": {"Id": 12}, "check_mode": True,
+ "json_data": [1, 2, 3], "res": CHANGES_MSG},
+ ])
+ def test_migrate_profile(self, mocker, params, ome_connection_mock_for_profile, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + 'get_profile', return_value=params.get('prof'))
+ mocker.patch(MODULE_PATH + 'get_target_details', return_value=params.get('target'))
+ f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False))
+ error_message = params["res"]
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
+ with pytest.raises(Exception) as err:
+ self.module.migrate_profile(f_module, ome_connection_mock_for_profile)
+ assert err.value.args[0] == error_message
+
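+ # main() exception contract exercised below: URLError is reported as
+ # 'unreachable', HTTPError is constructed with its full signature and
+ # surfaces through fail_json, and all other exception types simply fail the
+ # module with a message.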
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_profile_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_profile, ome_response_mock):
+ ome_default_args.update({"template_name": "t1"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'profile_operation', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'profile_operation', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'profile_operation',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profile_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profile_info.py
new file mode 100644
index 00000000..d83725d2
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profile_info.py
@@ -0,0 +1,95 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.1.0
+# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import pytest
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_server_interface_profile_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_server_interface_profile_info.'
+
+
+@pytest.fixture
+def ome_conn_mock_sip(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOMEMSIP(FakeAnsibleModule):
+
+ module = ome_server_interface_profile_info
+
+ def test_check_domain_service(self, ome_conn_mock_sip, ome_default_args):
+ f_module = self.get_module_mock()
+ result = self.module.check_domain_service(f_module, ome_conn_mock_sip)
+ assert result is None
+
+ def test_get_sip_info(self, ome_conn_mock_sip, ome_response_mock):
+ f_module = self.get_module_mock(params={"device_id": [25011]})
+ ome_conn_mock_sip.get_all_report_details.return_value = {
+ "resp_obj": ome_response_mock, "report_list": [{"Id": 25012, "DeviceServiceTag": "HKRF20"}]
+ }
+ with pytest.raises(Exception) as err:
+ self.module.get_sip_info(f_module, ome_conn_mock_sip)
+ assert err.value.args[0] == "Unable to complete the operation because the entered target " \
+ "device id(s) '25011' are invalid."
+ f_module = self.get_module_mock(params={"device_id": [25012]})
+ ome_response_mock.json_data = {"Id": "HKRF20", "ServerServiceTag": "HKRF20", "value": [{"Network": []}]}
+ ome_conn_mock_sip.json_data = [{"Id": "HKRF20", "ServerServiceTag": "HKRF20"}]
+ ome_conn_mock_sip.strip_substr_dict.return_value = {"Id": "HKRF20", "ServerServiceTag": "HKRF20",
+ "Networks": [{"Id": 10001}]}
+ result = self.module.get_sip_info(f_module, ome_conn_mock_sip)
+ assert result[0]["Id"] == "HKRF20"
+
+ def test_main_case(self, ome_conn_mock_sip, ome_response_mock, ome_default_args, mocker):
+ ome_default_args.update({"device_id": None, "validate_certs": False})
+ with pytest.raises(Exception) as err:
+ self._run_module(ome_default_args)
+ assert err.value.args[0]['msg'] == "one of the following is required: device_id, device_service_tag."
+ ome_default_args.update({"device_id": [25011], "validate_certs": False})
+ mocker.patch(MODULE_PATH + 'check_domain_service')
+ mocker.patch(MODULE_PATH + 'get_sip_info', return_value={"server_profiles": [{"Id": 25011}]})
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == "Successfully retrieved the server interface profile information."
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_sip_power_main_exception_case(self, exc_type, mocker, ome_default_args,
+ ome_conn_mock_sip, ome_response_mock):
+ ome_default_args.update({"device_id": [25011], "validate_certs": False})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profiles.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profiles.py
new file mode 100644
index 00000000..dcb1688a
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profiles.py
@@ -0,0 +1,699 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.1.0
+# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+from ssl import SSLError
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_server_interface_profiles
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
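+# Expected user-facing messages; they are asserted verbatim against the
+# module's output, so spelling and capitalization are left untouched here.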
+APPLY_TRIGGERED = "Successfully initiated the apply server profiles job."
+NO_STAG = "No profile found for service tag {service_tag}."
+CHANGES_MSG = "Changes found to be applied."
+NO_CHANGES_MSG = "No changes found to be applied."
+VLAN_NOT_FOUND = "The VLAN with a name {vlan_name} not found."
+DUPLICATE_NIC_IDENTIFIED = "Duplicate NIC identfiers provided."
+INVALID_UNTAGGED = "The untagged VLAN {id} provided for the NIC ID {nic_id} is not valid."
+NW_OVERLAP = "Network profiles of {service_tag} provided for tagged or untagged VLANs of {nic_id} overlaps."
+INVALID_DEV_ST = "Unable to complete the operation because the entered target device service tag(s) '{0}' are invalid."
+INVALID_DEV_ID = "Unable to complete the operation because the entered target device ids '{0}' are invalid."
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_server_interface_profiles.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_sips(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeSIPs(FakeAnsibleModule):
+ module = ome_server_interface_profiles
+
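+ # Each params dict below bundles one scenario: the REST payload (json_data),
+ # the device inventory (Devices), canned returns for the mocked _get_profile
+ # and _get_interface helpers, the VLAN name/native-VLAN maps, the module
+ # arguments (mparams), and the expected result message.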
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"JobId": 1234},
+ 'message': APPLY_TRIGGERED, "success": True,
+ 'Devices': {"value": [{"Id": 123, "Identifier": "ABC1234"}]},
+ "_get_profile": {
+ "Id": "ABC1234",
+ "ServerServiceTag": "ABC1234",
+ "BondingTechnology": "NoTeaming"},
+ "_get_interface": {
+ "NIC.Mezzanine.1A-1-1": {
+ "NativeVLAN": 3,
+ "Networks": [
+ 11569,
+ 10155
+ ],
+ "NicBonded": False
+ },
+ "NIC.Mezzanine.1A-2-1": {
+ "NativeVLAN": 2,
+ "Networks": [
+ 11569,
+ 10155,
+ 12350
+ ],
+ "NicBonded": False
+ }},
+ "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "three": 14681},
+ "natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
+ 'mparams': {"job_wait": False, "device_service_tag": ["ABC1234"],
+ "nic_configuration": [{
+ "nic_identifier": "NIC.Mezzanine.1A-1-1",
+ "tagged_networks": {
+ "names": ["jagvlan"],
+ "state": "present"},
+ "team": False,
+ "untagged_network": 3},
+ {
+ "nic_identifier": "NIC.Mezzanine.1A-2-1",
+ "tagged_networks": {"names": ["range120-125"],
+ "state": "present"},
+ "team": True,
+ "untagged_network": 3}],
+ "nic_teaming": "NoTeaming",
+ }},
+ {"json_data": {"JobId": 1234},
+ 'message': INVALID_DEV_ST.format('ABC123'), "success": True,
+ 'Devices': {"value": [{"Id": 123, "Identifier": "ABC1234"}]},
+ 'mparams': {"job_wait": False, "device_service_tag": ["ABC123"],
+ "nic_configuration": [],
+ "nic_teaming": "NoTeaming",
+ }},
+ {"json_data": {"JobId": 1234},
+ 'message': INVALID_DEV_ID.format('1111'), "success": True,
+ 'Devices': {"value": [{"Id": 123, "Identifier": "ABC1234"}]},
+ 'mparams': {"job_wait": False, "device_id": [1111],
+ "nic_configuration": [],
+ "nic_teaming": "NoTeaming",
+ }},
+ {"json_data": {"JobId": 1234},
+ 'message': INVALID_UNTAGGED.format(id=10, nic_id="NIC.Mezzanine.1A-1-1"), "success": True,
+ 'Devices': {"value": [{"Id": 123, "Identifier": "ABC1234"}]},
+ "_get_profile": {
+ "Id": "ABC1234",
+ "ServerServiceTag": "ABC1234",
+ "BondingTechnology": "NoTeaming"},
+ "_get_interface": {
+ "NIC.Mezzanine.1A-1-1": {
+ "NativeVLAN": 3,
+ "Networks": [
+ 11569,
+ 10155
+ ],
+ "NicBonded": False
+ },
+ "NIC.Mezzanine.1A-2-1": {
+ "NativeVLAN": 3,
+ "Networks": [
+ 11569,
+ 10155,
+ 12350
+ ],
+ "NicBonded": False
+ }},
+ "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "three": 14681},
+ "natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
+ 'mparams': {"job_wait": False, "device_service_tag": ["ABC1234"],
+ "nic_configuration": [{
+ "nic_identifier": "NIC.Mezzanine.1A-1-1",
+ "tagged_networks": {
+ "names": ["jagvlan"],
+ "state": "present"},
+ "team": False,
+ "untagged_network": 10},
+ {
+ "nic_identifier": "NIC.Mezzanine.1A-2-1",
+ "tagged_networks": {"names": ["range120-125"],
+ "state": "present"},
+ "team": True,
+ "untagged_network": 3}],
+ "nic_teaming": "NoTeaming",
+ }},
+ {"json_data": {"JobId": 1234},
+ 'message': VLAN_NOT_FOUND.format(vlan_name='vlan_x'), "success": True,
+ 'Devices': {"value": [{"Id": 123, "Identifier": "ABC1234"}]},
+ "_get_profile": {
+ "Id": "ABC1234",
+ "ServerServiceTag": "ABC1234",
+ "BondingTechnology": "NoTeaming"},
+ "_get_interface": {
+ "NIC.Mezzanine.1A-1-1": {
+ "NativeVLAN": 3,
+ "Networks": [
+ 11569,
+ 10155
+ ],
+ "NicBonded": False
+ },
+ "NIC.Mezzanine.1A-2-1": {
+ "NativeVLAN": 3,
+ "Networks": [
+ 11569,
+ 10155,
+ 12350
+ ],
+ "NicBonded": False
+ }},
+ "vlan_map": {"vlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "three": 14681},
+ "natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
+ 'mparams': {"job_wait": False, "device_service_tag": ["ABC1234"],
+ "nic_configuration": [{
+ "nic_identifier": "NIC.Mezzanine.1A-1-1",
+ "tagged_networks": {
+ "names": ["vlan_x"],
+ "state": "present"},
+ "team": False,
+ "untagged_network": 3},
+ {
+ "nic_identifier": "NIC.Mezzanine.1A-2-1",
+ "tagged_networks": {"names": ["range120-125"],
+ "state": "present"},
+ "team": True,
+ "untagged_network": 3}],
+ "nic_teaming": "NoTeaming",
+ }},
+ {"json_data": {"JobId": 1234},
+ 'message': NO_CHANGES_MSG, "success": True,
+ 'Devices': {"value": [{"Id": 123, "Identifier": "ABC1234"}]},
+ "_get_profile": {
+ "Id": "ABC1234",
+ "ServerServiceTag": "ABC1234",
+ "BondingTechnology": "NoTeaming"},
+ "_get_interface": {
+ "NIC.Mezzanine.1A-1-1": {
+ "NativeVLAN": 3,
+ "Networks": [
+ 11569,
+ 10155
+ ],
+ "NicBonded": False
+ },
+ "NIC.Mezzanine.1A-2-1": {
+ "NativeVLAN": 3,
+ "Networks": [
+ 12350
+ ],
+ "NicBonded": False
+ }},
+ "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "three": 14681},
+ "natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
+ 'mparams': {"job_wait": False, "device_service_tag": ["ABC1234"],
+ "nic_configuration": [{
+ "nic_identifier": "NIC.Mezzanine.1A-1-1",
+ "tagged_networks": {
+ "names": ["jagvlan", "VLAN 1"],
+ "state": "present"},
+ "team": False,
+ "untagged_network": 3},
+ {
+ "nic_identifier": "NIC.Mezzanine.1A-2-1",
+ "tagged_networks": {"names": ["range120-125"],
+ "state": "present"},
+ "team": False,
+ "untagged_network": 3}],
+ "nic_teaming": "NoTeaming",
+ }},
+ {"json_data": {"JobId": 1234}, "check_mode": True,
+ 'message': CHANGES_MSG, "success": True,
+ 'Devices': {"value": [{"Id": 123, "Identifier": "ABC1234"}]},
+ "_get_profile": {
+ "Id": "ABC1234",
+ "ServerServiceTag": "ABC1234",
+ "BondingTechnology": "NoTeaming"},
+ "_get_interface": {
+ "NIC.Mezzanine.1A-1-1": {
+ "NativeVLAN": 3,
+ "Networks": [
+ 11569
+ ],
+ "NicBonded": False
+ },
+ "NIC.Mezzanine.1A-2-1": {
+ "NativeVLAN": 3,
+ "Networks": [
+ 11569
+ ],
+ "NicBonded": False
+ }},
+ "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "three": 14681},
+ "natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
+ 'mparams': {"job_wait": False, "device_service_tag": ["ABC1234"],
+ "nic_configuration": [{
+ "nic_identifier": "NIC.Mezzanine.1A-1-1",
+ "tagged_networks": {
+ "names": ["jagvlan"],
+ "state": "present"},
+ "team": False,
+ "untagged_network": 3},
+ {
+ "nic_identifier": "NIC.Mezzanine.1A-2-1",
+ "tagged_networks": {"names": ["range120-125"],
+ "state": "present"},
+ "team": True,
+ "untagged_network": 3}],
+ "nic_teaming": "NoTeaming",
+ }},
+ {"json_data": {"JobId": 1234},
+ 'message': DUPLICATE_NIC_IDENTIFIED, "success": True,
+ 'Devices': {"value": [{"Id": 123, "Identifier": "ABC1234"}]},
+ "_get_profile": {
+ "Id": "ABC1234",
+ "ServerServiceTag": "ABC1234",
+ "BondingTechnology": "NoTeaming"},
+ "_get_interface": {
+ "NIC.Mezzanine.1A-1-1": {
+ "NativeVLAN": 3,
+ "Networks": [
+ 11569,
+ 10155
+ ],
+ "NicBonded": False
+ },
+ "NIC.Mezzanine.1A-2-1": {
+ "NativeVLAN": 2,
+ "Networks": [
+ 11569,
+ 10155,
+ 12350
+ ],
+ "NicBonded": False
+ }},
+ "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "three": 14681},
+ "natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
+ 'mparams': {"job_wait": False, "device_service_tag": ["ABC1234"],
+ "nic_configuration": [{
+ "nic_identifier": "NIC.Mezzanine.1A-1-1",
+ "tagged_networks": {
+ "names": ["jagvlan"],
+ "state": "present"},
+ "team": False,
+ "untagged_network": 3},
+ {
+ "nic_identifier": "NIC.Mezzanine.1A-1-1",
+ "tagged_networks": {"names": ["range120-125"],
+ "state": "present"},
+ "team": True,
+ "untagged_network": 3}],
+ "nic_teaming": "NoTeaming",
+ }},
+ {"json_data":
+ {"Id": 14808,
+ "JobId": 1234,
+ "JobName": "Server profile(s) configuration task",
+ "JobDescription": "Applies bonding technology to profile and networks to NICs.",
+ "Value": "Successfully Applied bonding technology to profile and networks to NICs.",
+ "LastRunStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2060,
+ "Name": "Completed"
+ },
+ },
+ 'message': "Successfully Applied bonding technology to profile and networks to NICs.", "success": True,
+ 'Devices': {"value": [{"Id": 123, "Identifier": "ABC1234"}]},
+ "_get_profile": {
+ "Id": "ABC1234",
+ "ServerServiceTag": "ABC1234",
+ "BondingTechnology": "NoTeaming"},
+ "_get_interface": {
+ "NIC.Mezzanine.1A-1-1": {
+ "NativeVLAN": 3,
+ "Networks": [
+ 11569,
+ 10155
+ ],
+ "NicBonded": False
+ },
+ "NIC.Mezzanine.1A-2-1": {
+ "NativeVLAN": 2,
+ "Networks": [
+ 11569,
+ 10155,
+ 12350
+ ],
+ "NicBonded": False
+ }},
+ "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "three": 14681},
+ "natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
+ 'mparams': {"device_service_tag": ["ABC1234"],
+ "nic_configuration": [{
+ "nic_identifier": "NIC.Mezzanine.1A-1-1",
+ "tagged_networks": {
+ "names": ["jagvlan"],
+ "state": "present"},
+ "team": False,
+ "untagged_network": 3},
+ {
+ "nic_identifier": "NIC.Mezzanine.1A-2-1",
+ "tagged_networks": {"names": ["range120-125"],
+ "state": "present"},
+ "team": True,
+ "untagged_network": 3}],
+ "nic_teaming": "NoTeaming",
+ }},
+ {"json_data": {
+ "Id": 14808,
+ "JobId": 1234,
+ "JobName": "Server profile(s) configuration task",
+ "JobDescription": "Applies bonding technology to profile and networks to NICs.",
+ "Value": 1234, # to cause exception
+ "LastRunStatus": {
+ "@odata.type": "#JobService.JobStatus",
+ "Id": 2060,
+ "Name": "Completed"
+ },
+ },
+ 'message': "Applies bonding technology to profile and networks to NICs.", "success": True,
+ 'Devices': {"value": [{"Id": 123, "Identifier": "ABC1234"}]},
+ "_get_profile": {
+ "Id": "ABC1234",
+ "ServerServiceTag": "ABC1234",
+ "BondingTechnology": "NoTeaming"},
+ "_get_interface": {
+ "NIC.Mezzanine.1A-1-1": {
+ "NativeVLAN": 3,
+ "Networks": [
+ 11569,
+ 10155
+ ],
+ "NicBonded": False
+ },
+ "NIC.Mezzanine.1A-2-1": {
+ "NativeVLAN": 2,
+ "Networks": [
+ 11569,
+ 10155,
+ 12350
+ ],
+ "NicBonded": False
+ }},
+ "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "three": 14681},
+ "natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
+ 'mparams': {"device_service_tag": ["ABC1234"],
+ "nic_configuration": [{
+ "nic_identifier": "NIC.Mezzanine.1A-1-1",
+ "tagged_networks": {
+ "names": ["jagvlan"],
+ "state": "present"},
+ "team": False,
+ "untagged_network": 3},
+ {
+ "nic_identifier": "NIC.Mezzanine.1A-2-1",
+ "tagged_networks": {"names": ["range120-125"],
+ "state": "present"},
+ "team": True,
+ "untagged_network": 3}],
+ "nic_teaming": "NoTeaming",
+ }}
+ ])
+ def test_ome_sips_success_case(
+ self,
+ params,
+ ome_connection_mock_for_sips,
+ ome_response_mock,
+ ome_default_args,
+ mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_connection_mock_for_sips.get_all_items_with_pagination.return_value = params[
+ 'Devices']
+ mocker.patch(
+ MODULE_PATH +
+ '_get_profile',
+ return_value=params.get(
+ '_get_profile',
+ {}))
+ mocker.patch(
+ MODULE_PATH +
+ '_get_interface',
+ return_value=params.get(
+ '_get_interface',
+ {}))
+ mocker.patch(
+ MODULE_PATH + 'get_vlan_ids',
+ return_value=(
+ params.get('vlan_map'),
+ params.get('natives')))
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(
+ ome_default_args, check_mode=params.get(
+ 'check_mode', False))
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("params",
+ [{"json_data": {"Id": "ABC1234",
+ "ServerServiceTag": "ABC1234",
+ "BondingTechnology": "NoTeaming"},
+ "service_tag": "ABC1234"}])
+ def test_ome_get_profile(
+ self,
+ params,
+ ome_connection_mock_for_sips,
+ ome_response_mock,
+ ome_default_args,
+ mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ f_module = self.get_module_mock(ome_default_args)
+ result = self.module._get_profile(
+ f_module,
+ ome_connection_mock_for_sips,
+ params.get("service_tag"))
+ assert result["Id"] == params.get("service_tag")
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {
+ "@odata.context": "/api/$metadata#Collection(NetworkService.ServerInterfaceProfile)",
+ "@odata.count": 2,
+ "value": [
+ {
+ "Id": "NIC.Mezzanine.1A-1-1",
+ "OnboardedPort": "59HW8X2:ethernet1/1/1",
+ "NativeVLAN": 3,
+ "NicBonded": False,
+ "FabricId": "f918826e-2515-4967-98f4-5488e810ca2e",
+ "Networks@odata.count": 2,
+ "Networks": [
+ {
+ "Id": 10155,
+ "Name": "jagvlan",
+ "Description": None,
+ "VlanMaximum": 143,
+ "VlanMinimum": 143,
+ "Type": 1,
+ },
+ {
+ "Id": 11569,
+ "Name": "VLAN 1",
+ "Description": "VLAN 1",
+ "VlanMaximum": 1,
+ "VlanMinimum": 1,
+ "Type": 2,
+ }
+ ]
+ },
+ {
+ "Id": "NIC.Mezzanine.1A-2-1",
+ "OnboardedPort": "6H7J6Z2:ethernet1/1/1",
+ "NativeVLAN": 3,
+ "NicBonded": False,
+ "FabricId": "f918826e-2515-4967-98f4-5488e810ca2e",
+ "Networks@odata.count": 3,
+ "Networks": [
+ {
+ "Id": 10155,
+ "Name": "jagvlan",
+ "Description": None,
+ "VlanMaximum": 143,
+ "VlanMinimum": 143,
+ "Type": 1,
+ },
+ {
+ "Id": 11569,
+ "Name": "VLAN 1",
+ "Description": "VLAN 1",
+ "VlanMaximum": 1,
+ "VlanMinimum": 1,
+ "Type": 2,
+ },
+ {
+ "Id": 12350,
+ "Name": "range120-125",
+ "Description": None,
+ "VlanMaximum": 125,
+ "VlanMinimum": 120,
+ "Type": 3,
+ }
+ ]
+ }
+ ]
+ },
+ "service_tag": "ABC1234", "intrfc": {
+ "NIC.Mezzanine.1A-1-1": {
+ "NativeVLAN": 3,
+ "Networks": {
+ 11569,
+ 10155
+ },
+ "NicBonded": False
+ },
+ "NIC.Mezzanine.1A-2-1": {
+ "NativeVLAN": 3,
+ "Networks": {
+ 11569,
+ 10155,
+ 12350
+ },
+ "NicBonded": False
+ }
+ }}])
+ def test_ome_get_interface(
+ self,
+ params,
+ ome_connection_mock_for_sips,
+ ome_response_mock,
+ ome_default_args,
+ mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ f_module = self.get_module_mock(ome_default_args)
+ result = self.module._get_interface(
+ f_module,
+ ome_connection_mock_for_sips,
+ params.get("service_tag"))
+ assert result == params.get("intrfc")
+
+ @pytest.mark.parametrize("params",
+ [{"json_data": {"@odata.context": "/api/$metadata#Collection(NetworkConfigurationService.Network)",
+ "@odata.count": 6,
+ "value": [{"Id": 10155,
+ "Name": "jagvlan",
+ "VlanMaximum": 143,
+ "VlanMinimum": 143,
+ "Type": 1,
+ },
+ {"Id": 11569,
+ "Name": "VLAN 1",
+ "Description": "VLAN 1",
+ "VlanMaximum": 1,
+ "VlanMinimum": 1,
+ "Type": 2,
+ },
+ {"Id": 12350,
+ "Name": "range120-125",
+ "VlanMaximum": 125,
+ "VlanMinimum": 120,
+ "Type": 3,
+ },
+ {"Id": 12352,
+ "Name": "range130-135",
+ "VlanMaximum": 135,
+ "VlanMinimum": 130,
+ "Type": 4,
+ },
+ {"Id": 14679,
+ "Name": "two",
+ "VlanMaximum": 2,
+ "VlanMinimum": 2,
+ "Type": 1,
+ },
+ {"Id": 14681,
+ "Name": "three",
+ "VlanMaximum": 3,
+ "VlanMinimum": 3,
+ "Type": 3,
+ }]},
+ "vlan_map": {"jagvlan": 10155,
+ "VLAN 1": 11569,
+ "range120-125": 12350,
+ "range130-135": 12352,
+ "two": 14679,
+ "three": 14681},
+ "natives": {143: 10155,
+ 1: 11569,
+ 2: 14679,
+ 3: 14681,
+ 0: 0}}])
+ def test_ome_get_vlan_ids(
+ self,
+ params,
+ ome_connection_mock_for_sips,
+ ome_response_mock,
+ ome_default_args,
+ mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ vlan_map, natives = self.module.get_vlan_ids(
+ ome_connection_mock_for_sips)
+ assert vlan_map == params.get("vlan_map")
+ assert natives == params.get("natives")
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError,
+ ValueError,
+ SSLError,
+ TypeError,
+ ConnectionError,
+ HTTPError,
+ URLError])
+ def test_ome_sips_main_exception_failure_case(
+ self,
+ exc_type,
+ mocker,
+ ome_default_args,
+ ome_connection_mock_for_sips,
+ ome_response_mock):
+ ome_default_args.update({"device_service_tag": ["SRV1234"],
+ "nic_configuration": [{'nic_identifier': "NIC1"}]})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(
+ MODULE_PATH + 'get_valid_service_tags',
+ side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(
+ MODULE_PATH + 'get_valid_service_tags',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'get_valid_service_tags',
+ side_effect=exc_type('http://testhost.com',
+ 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric.py
new file mode 100644
index 00000000..5d275f19
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric.py
@@ -0,0 +1,1892 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 3.6.0
+# Copyright (C) 2020-2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_smart_fabric
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+CHECK_MODE_CHANGE_FOUND_MSG = "Changes found to be applied."
+CHECK_MODE_CHANGE_NOT_FOUND_MSG = "No Changes found to be applied."
+FABRIC_NOT_FOUND_ERROR_MSG = "The smart fabric '{0}' is not present in the system."
+DOMAIN_SERVICE_TAG_ERROR_MSG = "Unable to retrieve the domain information because the" \
+ " domain of the provided service tag {0} is not available."
+LEAD_CHASSIS_ERROR_MSG = "System should be a lead chassis if the assigned fabric topology type is {0}."
+SYSTEM_NOT_SUPPORTED_ERROR_MSG = "Fabric management is not supported on the specified system."
+DESIGN_MODEL_ERROR_MSG = "The network type of the {0} must be {1}."
+DEVICE_SERVICE_TAG_TYPE_ERROR_MSG = "The {0} type must be {1}."
+DEVICE_SERVICE_TAG_NOT_FOUND_ERROR_MSG = "Unable to retrieve the device information because the device" \
+ " with the provided service tag {0} is not available."
+IDEMPOTENCY_MSG = "Specified fabric details are the same as the existing settings."
+REQUIRED_FIELD = "Options 'fabric_design', 'primary_switch_service_tag' and 'secondary_switch_service_tag'" \
+ " are required for fabric creation."
+DUPLICATE_TAGS = "The switch details of the primary switch overlaps with the secondary switch details."
+PRIMARY_SWITCH_OVERLAP_MSG = "The primary switch service tag is overlapping with existing secondary switch details."
+SECONDARY_SWITCH_OVERLAP_MSG = "The switch details of the secondary switch overlaps with the existing primary" \
+ " switch details."
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
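+# Shared mock payloads (a device inventory entry and existing fabric records)
+# reused by the smart fabric test cases that follow.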
+device_details = {
+ "Id": Constants.device_id1,
+ "Type": 4000,
+ "Identifier": "GTCT8T2",
+ "DeviceServiceTag": "GTCT8T2",
+ "ChassisServiceTag": "FPTN6Z2",
+ "Model": "MX9116n Fabric Engine",
+ "PowerState": 17,
+ "ManagedState": 3000,
+ "Status": 1000,
+ "SystemId": 2031,
+ "DeviceName": "IOM-A2",
+ "SlotConfiguration": {
+ "ChassisName": "MX-FPTN6Z2",
+ "SlotId": "13313",
+ "DeviceType": "4000",
+ "ChassisId": "13294",
+ "SlotNumber": "2",
+ "SledBlockPowerOn": "null",
+ "SlotName": "IOM-A2",
+ "ChassisServiceTag": "FPTN6Z2",
+ "SlotType": "4000"
+ },
+ "DeviceManagement": [
+ {
+ "ManagementId": 76383,
+ "NetworkAddress": Constants.hostname1,
+ "MacAddress": "00:00:00:00:00",
+ "ManagementType": 2,
+ "InstrumentationName": "MX9116n Fabric Engine",
+ "DnsName": "",
+ "ManagementProfile": [
+ {
+ "ManagementProfileId": 76383,
+ "ProfileId": "FX7_BASE",
+ "ManagementId": 76383,
+ "AgentName": "",
+ "Version": "",
+ "ManagementURL": "",
+ "HasCreds": 0,
+ "Status": 1000,
+ "StatusDateTime": "2020-05-07 15:00:14.718"
+ }
+ ]
+ }
+ ]
+}
+all_fabric_details = [
+ {
+ "Id": "1312cceb-c3dd-4348-95c1-d8541a17d776",
+ "Name": "Fabric_1",
+ "Description": "create new fabric1",
+ "OverrideLLDPConfiguration": "NA",
+ "ScaleVLANProfile": "NA",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "2HB7NX2"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "2HBFNX2"
+ }
+ ],
+ "FabricDesign": {
+ "@odata.id": "/api/NetworkService/Fabrics('1312cceb-c3dd-4348-95c1-d8541a17d776')/FabricDesign"
+ }
+ },
+ {
+ "Id": "1312cceb-c3dd-4348-95c1-123456",
+ "Name": "Fabric_1_2",
+ "Description": "create new fabric2",
+ "OverrideLLDPConfiguration": "Enabled",
+ "ScaleVLANProfile": "NA",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2
+ }
+ ],
+ "FabricDesign": {
+ "@odata.id": "/api/NetworkService/Fabrics('1312cceb-c3dd-4348-95c1-123456')/FabricDesign"
+ }
+ }
+]
+
+
+@pytest.fixture
+def ome_connection_mock_for_smart_fabric(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_smart_fabric.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeSmartFabric(FakeAnsibleModule):
+ module = ome_smart_fabric
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_main_ome_smart_fabric_exception_handling_case(self, exc_type, ome_default_args,
+ ome_connection_mock_for_smart_fabric,
+ ome_response_mock, mocker):
+ ome_default_args.update({"name": "name", "new_name": "new_name"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.fabric_actions',
+ side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.fabric_actions',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ for status_code, msg in {501: SYSTEM_NOT_SUPPORTED_ERROR_MSG, 400: 'http error message'}.items():
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.fabric_actions',
+ side_effect=exc_type('http://testhost.com', status_code, msg,
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert msg in result['msg']
+ assert 'msg' in result
+
+ def test_get_msm_device_details_success_case(self, ome_connection_mock_for_smart_fabric, ome_default_args, mocker):
+ """
+ success case: when the provided design type and role type match, return the service tag and msm details
+ """
+ ome_default_args.update({"fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_different_chassis"})
+ f_module = self.get_module_mock(params=ome_default_args)
+ resp_data = {
+ "Id": Constants.device_id1,
+ "value": [
+ {
+ "Id": 10086,
+ "DeviceId": 10061,
+ "PublicAddress": [
+ ome_default_args["hostname"],
+ "1000:mock_val"
+ ],
+ "Identifier": Constants.service_tag1,
+ "DomainRoleTypeValue": "LEAD",
+ "Version": "1.20.00",
+ },
+ {
+ "Id": 13341,
+ "DeviceId": 13294,
+ "PublicAddress": [
+ Constants.hostname2,
+ "1000:mocked_val"
+ ],
+ "Identifier": Constants.service_tag2,
+ "DomainTypeValue": "MSM",
+ "DomainRoleTypeValue": "MEMBER",
+ "Version": "1.20.00",
+ }
+ ]
+ }
+ ome_connection_mock_for_smart_fabric.get_all_items_with_pagination.return_value = resp_data
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_service_tag_with_fqdn',
+ return_value=None)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_ip_from_host',
+ return_value=ome_default_args["hostname"])
+ service_tag, msm_version = self.module.get_msm_device_details(ome_connection_mock_for_smart_fabric, f_module)
+ assert service_tag == Constants.service_tag1
+ assert msm_version == "1.20.00"
+
+ def test_get_msm_device_details_fqdn_success_case1(self, ome_connection_mock_for_smart_fabric, ome_default_args,
+ mocker):
+ """
+ success case: the hostname provided is an FQDN and the provided design type
+ and role type match; return the service tag and msm details
+ """
+ ome_default_args.update(
+ {"hostname": "XX-XXXX.yyy.lab", "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_different_chassis"})
+ f_module = self.get_module_mock(params=ome_default_args)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_ip_from_host',
+ return_value=ome_default_args["hostname"])
+ resp_data = {
+ "Id": Constants.device_id1,
+ "value": [
+ {
+ "Id": 10086,
+ "DeviceId": 10061,
+ "PublicAddress": [
+ ome_default_args["hostname"],
+ "1000:mock_val"
+ ],
+ "Identifier": Constants.service_tag1,
+ "DomainRoleTypeValue": "LEAD",
+ "Version": "1.20.00",
+ },
+ {
+ "Id": 13341,
+ "DeviceId": 13294,
+ "PublicAddress": [
+ Constants.hostname2,
+ "1000:mocked_val"
+ ],
+ "Identifier": Constants.service_tag2,
+ "DomainTypeValue": "MSM",
+ "DomainRoleTypeValue": "MEMBER",
+ "Version": "1.20.00",
+ }
+ ]
+ }
+ ome_connection_mock_for_smart_fabric.get_all_items_with_pagination.return_value = resp_data
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_service_tag_with_fqdn',
+ return_value="FKMLRZ2")
+ service_tag, msm_version = self.module.get_msm_device_details(ome_connection_mock_for_smart_fabric, f_module)
+ assert service_tag == Constants.service_tag1
+ assert msm_version == "1.20.00"
+
+ def test_get_msm_device_details_fqdn_success_case2(self, ome_connection_mock_for_smart_fabric, ome_default_args,
+ mocker):
+ """
+ when the hostname provided is an FQDN and
+ success case: when the provided design type is a same-chassis design and the FQDN is not of the lead type
+ """
+ ome_default_args.update(
+ {"hostname": "XX-XXXX.yyy.lab", "fabric_design": "2xMX5108n_Ethernet_Switches_in_same_chassis"})
+ f_module = self.get_module_mock(params=ome_default_args)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_ip_from_host',
+ return_value=ome_default_args["hostname"])
+ resp_data = {
+ "Id": Constants.device_id1,
+ "value": [
+ {
+ "Id": 10086,
+ "DeviceId": 10061,
+ "PublicAddress": [
+ Constants.hostname1,
+ "1000:mock_ipv6"
+ ],
+ "Identifier": Constants.service_tag1,
+ "DomainRoleTypeValue": "LEAD",
+ "Version": "1.20.00",
+ },
+ {
+ "Id": 13341,
+ "DeviceId": 13294,
+ "PublicAddress": [
+ Constants.hostname2,
+ "1001:mocked_ippv6"
+ ],
+ "Identifier": Constants.service_tag2,
+ "DomainTypeValue": "MSM",
+ "DomainRoleTypeValue": "MEMBER",
+ "Version": "1.20.10",
+ }
+ ]
+ }
+ ome_connection_mock_for_smart_fabric.get_all_items_with_pagination.return_value = resp_data
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_service_tag_with_fqdn',
+ return_value=Constants.service_tag2)
+ service_tag, msm_version = self.module.get_msm_device_details(ome_connection_mock_for_smart_fabric, f_module)
+ assert service_tag == Constants.service_tag2
+ assert msm_version == "1.20.10"
+
+ def test_get_msm_device_details_fqdn_failure_case1(self, ome_connection_mock_for_smart_fabric, ome_default_args,
+ mocker):
+ """
+ when the hostname provided is an FQDN and
+ failure case: when the provided design type is 2xMX9116n_Fabric_Switching_Engines_in_different_chassis
+ but the FQDN is not of the lead type
+ """
+ ome_default_args.update(
+ {"hostname": "XX-XXXX.yyy.lab", "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_different_chassis"})
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_ip_from_host',
+ return_value=ome_default_args["hostname"])
+ f_module = self.get_module_mock(params=ome_default_args)
+ resp_data = {
+ "Id": Constants.device_id1,
+ "value": [
+ {
+ "Id": 10086,
+ "DeviceId": 10061,
+ "PublicAddress": [
+ Constants.hostname1,
+ "1000:mock_val"
+ ],
+ "Identifier": Constants.service_tag1,
+ "DomainRoleTypeValue": "LEAD",
+ "Version": "1.20.00",
+ },
+ {
+ "Id": 13341,
+ "DeviceId": 13294,
+ "PublicAddress": [
+ Constants.hostname2,
+ "1000:mocked_val"
+ ],
+ "Identifier": Constants.service_tag2,
+ "DomainTypeValue": "MSM",
+ "DomainRoleTypeValue": "MEMBER",
+ "Version": "1.20.00",
+ }
+ ]
+ }
+ ome_connection_mock_for_smart_fabric.get_all_items_with_pagination.return_value = resp_data
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_service_tag_with_fqdn',
+ return_value=Constants.service_tag2)
+ with pytest.raises(Exception, match=LEAD_CHASSIS_ERROR_MSG.format(ome_default_args["fabric_design"])) as ex:
+ self.module.get_msm_device_details(ome_connection_mock_for_smart_fabric, f_module)
+
+ def test_get_msm_device_details_fqdn_failure_case2(self, ome_connection_mock_for_smart_fabric, ome_default_args,
+ mocker):
+ """
+ when the hostname provided is an FQDN and
+ failure case: when the provided FQDN is not available in the domain list, an error should be raised
+ """
+ ome_default_args.update(
+ {"hostname": "XX-XXXX.yyy.lab", "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_different_chassis"})
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_ip_from_host',
+ return_value=ome_default_args["hostname"])
+ f_module = self.get_module_mock(params=ome_default_args)
+ resp_data = {
+ "value": [
+ ]
+ }
+ ome_connection_mock_for_smart_fabric.get_all_items_with_pagination.return_value = resp_data
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_service_tag_with_fqdn',
+ return_value="FPTN6Z2")
+ with pytest.raises(Exception, match=SYSTEM_NOT_SUPPORTED_ERROR_MSG):
+ self.module.get_msm_device_details(ome_connection_mock_for_smart_fabric, f_module)
+
+ def test_get_msm_device_details_failure_case_01(self, ome_connection_mock_for_smart_fabric, ome_default_args,
+ mocker):
+ """
+ raise an exception if the design type is 2xMX9116n_Fabric_Switching_Engines_in_different_chassis
+ but the domain role type is not LEAD
+ """
+ ome_default_args.update({"fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_different_chassis"})
+ f_module = self.get_module_mock(params=ome_default_args)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_ip_from_host',
+ return_value=ome_default_args["hostname"])
+ resp_data = {"Id": Constants.device_id1, "value": [
+ {
+ "@odata.id": "/api/ManagementDomainService/Domains(25038)",
+ "Id": 25038,
+ "DeviceId": Constants.device_id1,
+ "PublicAddress": [
+ ome_default_args["hostname"]
+ ],
+ "Name": "MX-2H5DNX2",
+ "Description": "PowerEdge MX7000",
+ "Identifier": Constants.service_tag1,
+ "DomainTypeId": 4000,
+ "DomainTypeValue": "MSM",
+ "DomainRoleTypeId": 3002,
+ "DomainRoleTypeValue": "STANDALONE",
+ "Version": "1.20.00",
+ "Local": True,
+ "GroupId": "d78ba475-f5d5-4dbb-97da-b4b1f190caa2",
+ "GroupName": None,
+ "BackupLead": False,
+ "Capabilities": [],
+ "BackupLeadHealth": 2000
+ }
+ ]}
+ ome_connection_mock_for_smart_fabric.get_all_items_with_pagination.return_value = resp_data
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_service_tag_with_fqdn',
+ return_value=None)
+ with pytest.raises(Exception, match=LEAD_CHASSIS_ERROR_MSG.format(ome_default_args["fabric_design"])) as ex:
+ self.module.get_msm_device_details(ome_connection_mock_for_smart_fabric, f_module)
+
+ def test_get_msm_device_details_failure_case_02(self, ome_connection_mock_for_smart_fabric, ome_default_args,
+ mocker):
+ """
+ raise an exception if there are no domain values in the system
+ """
+ ome_default_args.update({"fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_different_chassis"})
+ f_module = self.get_module_mock(params=ome_default_args)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_ip_from_host',
+ return_value=ome_default_args["hostname"])
+ resp_data = {"Id": None, "value": [
+ ]}
+ ome_connection_mock_for_smart_fabric.get_all_items_with_pagination.return_value = resp_data
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_service_tag_with_fqdn',
+ return_value=None)
+ with pytest.raises(Exception, match=SYSTEM_NOT_SUPPORTED_ERROR_MSG):
+ self.module.get_msm_device_details(ome_connection_mock_for_smart_fabric, f_module)
+
+ @pytest.mark.parametrize("modify_payload", [
+ {"Name": "Fabric-2"},
+ {"Name": "Fabric-1", "Description": "This is a fabric1."},
+ {"FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2
+ }
+ ], },
+ {
+ "FabricDesign": {
+ "Name": "2xMX9116n_Fabric_Switching_Engines_in_different_chassis"
+ }
+ },
+ {
+ "FabricDesignMapping": [
+ {"DesignNode": "Switch-B", "PhysicalNode": Constants.service_tag2},
+ {"DesignNode": "Switch-A", "PhysicalNode": Constants.service_tag1}]
+ }
+ ])
+ def test_compare_payloads_diff_case_01(self, modify_payload):
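+ """compare_payloads should report a difference (True) for every parametrized change to the current payload."""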
+ current_payload = {
+ "Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f",
+ "Name": "Fabric-1",
+ "Description": "This is a fabric.",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "2HB7NX2"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "2HBFNX2"
+ }
+ ],
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"
+ }
+ }
+ diff = self.module.compare_payloads(modify_payload, current_payload)
+ assert diff is True
+
+ @pytest.mark.parametrize("current_payload", [
+ {"Name": "Fabric-1", "Description": "This is a fabric1."},
+ {"Name": "Fabric-1", "Description": "This is a fabric.", "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2
+ }
+ ], "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"
+ }}])
+ def test_compare_payloads_diff_case_02(self, current_payload):
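+ """compare_payloads should report a difference (True) when the current payload lacks keys requested in the modify payload or their values differ."""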
+ modify_payload = {
+ "Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f",
+ "Name": "Fabric-1",
+ "Description": "This is a fabric.",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "2HB7NX2"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "2HBFNX2"
+ }
+ ],
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"
+ }
+ }
+ diff = self.module.compare_payloads(modify_payload, current_payload)
+ assert diff is True
+
+ @pytest.mark.parametrize("modify_payload", [
+ {"Name": "Fabric-1", "Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f"},
+ {"Name": "Fabric-1", "Description": "This is a fabric.", "Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f", },
+ {"Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f", "Name": "Fabric-1", "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2
+ }
+ ], },
+ {
+ "Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f",
+ "Name": "Fabric-1",
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"
+ }
+ },
+ {
+ "Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f",
+ "Name": "Fabric-1",
+ "Description": "This is a fabric.",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2
+ }
+ ],
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"
+ }
+ }
+ ])
+ def test_compare_payloads_no_diff_case_01(self, modify_payload):
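+ """compare_payloads should report no difference (False) when the modify payload is a matching subset of the current payload."""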
+ current_payload = {
+ "Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f",
+ "Name": "Fabric-1",
+ "Description": "This is a fabric.",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2
+ }
+ ],
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"
+ }
+ }
+ val = self.module.compare_payloads(modify_payload, current_payload)
+ assert val is False
+
+ @pytest.mark.parametrize('val', [{'msg': CHECK_MODE_CHANGE_FOUND_MSG,
+ "current_payload": {"Name": "Fabric-1", "Description": "This is a fabric.",
+ "FabricDesignMapping": [{"DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1},
+ {"DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2}],
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}},
+ "expected_payload": {"Name": "Fabric-1", "Description": "This is a fabric.",
+ "FabricDesignMapping": [{"DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag2},
+ {"DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag1}],
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}}},
+ {'msg': CHECK_MODE_CHANGE_NOT_FOUND_MSG,
+ "current_payload": {"Name": "Fabric-1", "Description": "This is a fabric.",
+ "FabricDesignMapping": [{"DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1},
+ {"DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2}],
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}},
+ "expected_payload": {"Name": "Fabric-1", "Description": "This is a fabric.",
+ "FabricDesignMapping": [{"DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1},
+ {"DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2}],
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}}},
+ {'msg': CHECK_MODE_CHANGE_NOT_FOUND_MSG, "current_payload": {"Name": "Fabric-1",
+ "Description": "This is list order change case.",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1},
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2}],
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}},
+ "expected_payload": {"Name": "Fabric-1",
+ "Description": "This is list order change case.",
+ "FabricDesignMapping": [{"DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2},
+ {"DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1}],
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}}},
+ {'msg': CHECK_MODE_CHANGE_NOT_FOUND_MSG,
+ "current_payload": {'Id': 'fa9f1b12-c003-4772-8b90-601d0bf87c69',
+ 'Name': 'MX9116N', 'OverrideLLDPConfiguration': 'Disabled',
+ 'FabricDesignMapping': [
+ {'DesignNode': 'Switch-B', 'PhysicalNode': '6XLVMR2'},
+ {'DesignNode': 'Switch-A', 'PhysicalNode': '6XLTMR2'}],
+ 'FabricDesign': {
+ 'Name': '2xMX9116n_Fabric_Switching_Engines_in_different_chassis'}},
+ "expected_payload": {'Name': 'MX9116N', 'OverrideLLDPConfiguration': 'Disabled',
+ 'FabricDesignMapping': [
+ {'DesignNode': 'Switch-A', 'PhysicalNode': '6XLTMR2'},
+ {'DesignNode': 'Switch-B', 'PhysicalNode': '6XLVMR2'}],
+ 'FabricDesign': {
+ 'Name': '2xMX9116n_Fabric_Switching_Engines_in_different_chassis'},
+ 'Id': 'fa9f1b12-c003-4772-8b90-601d0bf87c69'}}
+ ])
+ def test_idempotency_check_for_state_present_modify_check_mode_case01(self, mocker, val):
+ f_module = self.get_module_mock(params={}, check_mode=True)
+ error_message = val["msg"]
+ with pytest.raises(Exception) as err:
+ self.module.idempotency_check_for_state_present("8f25f714-9ea8-48e9-8eac-162d5d842e9f",
+ val['current_payload'], val['expected_payload'],
+ f_module)
+ assert err.value.args[0] == error_message
+
+ def test_idempotency_check_for_state_present_modify_non_check_mode_case01(self, mocker):
+ f_module = self.get_module_mock(params={}, check_mode=False)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.compare_payloads',
+ return_value=False)
+ with pytest.raises(Exception, match=IDEMPOTENCY_MSG):
+ self.module.idempotency_check_for_state_present("8f25f714-9ea8-48e9-8eac-162d5d842e9f",
+ {}, {},
+ f_module)
+
+ def test_idempotency_check_for_state_present_create_check_mode_case01(self, mocker):
+ f_module = self.get_module_mock(params={}, check_mode=True)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.compare_payloads',
+ return_value=False)
+ with pytest.raises(Exception, match=CHECK_MODE_CHANGE_FOUND_MSG):
+ self.module.idempotency_check_for_state_present(None,
+ {}, {},
+ f_module)
+
+ def test_design_node_dict_update_case_01(self):
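+ """design_node_dict_update should map Switch-A/Switch-B entries to PhysicalNode1/PhysicalNode2 keys."""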
+ design_node_map = [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2
+ }
+ ]
+ val = self.module.design_node_dict_update(design_node_map)
+ assert val == {
+ 'PhysicalNode1': Constants.service_tag1,
+ 'PhysicalNode2': Constants.service_tag2
+ }
+
+ def test_design_node_dict_update_case_02(self):
+ design_node_map = [
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2
+ }
+ ]
+ val = self.module.design_node_dict_update(design_node_map)
+ assert val == {
+ 'PhysicalNode2': Constants.service_tag2
+ }
+
+ @pytest.mark.parametrize("modify_payload", [
+ {
+ 'PhysicalNode1': Constants.service_tag2,
+ 'PhysicalNode2': Constants.service_tag1
+ }
+ ])
+ def test_validate_switches_overlap_case_01(self, modify_payload):
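+ """Swapping the primary and secondary switches relative to the current assignment must raise an error."""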
+ current_dict = {
+ 'PhysicalNode1': Constants.service_tag1,
+ 'PhysicalNode2': Constants.service_tag2
+ }
+ modify_dict = modify_payload
+ f_module = self.get_module_mock(params={"primary_switch_service_tag": Constants.service_tag2,
+ "secondary_switch_service_tag": Constants.service_tag1
+ })
+ with pytest.raises(Exception, match="The modify operation does not support primary_switch_service_tag update."):
+ self.module.validate_switches_overlap(current_dict, modify_dict, f_module)
+
+ @pytest.mark.parametrize("modify_payload", [
+ {
+ 'PhysicalNode1': Constants.service_tag2,
+ 'PhysicalNode2': Constants.service_tag1
+ }
+ ])
+ def test_validate_switches_overlap_case_02(self, modify_payload):
+ current_dict = {
+ 'PhysicalNode1': Constants.service_tag2,
+ 'PhysicalNode2': Constants.service_tag1
+ }
+ modify_dict = modify_payload
+ f_module = self.get_module_mock(params={"primary_switch_service_tag": Constants.service_tag2,
+ "secondary_switch_service_tag": Constants.service_tag1
+ })
+ self.module.validate_switches_overlap(current_dict, modify_dict, f_module)
+
+ def test_validate_switches_overlap_case_03(self):
+ """
+ when the requested switch assignment matches the current assignment, validation should pass without raising
+ """
+ current_dict = {
+ 'PhysicalNode1': Constants.service_tag1,
+ 'PhysicalNode2': Constants.service_tag2
+ }
+ modify_dict = {
+ 'PhysicalNode1': Constants.service_tag1,
+ 'PhysicalNode2': Constants.service_tag2
+ }
+ f_module = self.get_module_mock(params={"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2
+ })
+ self.module.validate_switches_overlap(current_dict, modify_dict, f_module)
+
+ def test_fabric_design_map_payload_creation_case01(self, mocker):
+ modify_payload = [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2
+ }
+ ]
+ current_payload = [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "xyz123"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "abc456"
+ }
+ ]
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.validate_switches_overlap', return_value=None)
+ f_module = self.get_module_mock(params={})
+ design_map = self.module.fabric_design_map_payload_creation(modify_payload, current_payload, f_module)
+ assert design_map == modify_payload
+
+ def test_fabric_design_map_payload_creation_case02(self, mocker):
+ modify_payload = [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ }
+ ]
+ current_payload = [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "xyz123"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "abc456"
+ }
+ ]
+ f_module = self.get_module_mock(params={})
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.validate_switches_overlap', return_value=None)
+ design_map = self.module.fabric_design_map_payload_creation(modify_payload, current_payload, f_module)
+ assert design_map == [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "abc456"
+ }
+ ]
+
+ def test_fabric_design_map_payload_creation_case03(self, mocker):
+ modify_payload = [
+ ]
+ current_payload = [
+ ]
+ f_module = self.get_module_mock(params={})
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.validate_switches_overlap', return_value=None)
+ design_map = self.module.fabric_design_map_payload_creation(modify_payload, current_payload, f_module)
+ assert design_map == []
+
+ def test_merge_payload_case_01(self):
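+ """merge_payload should overlay the requested changes on the current payload and keep unspecified current values."""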
+ modify_payload = {
+ "Name": "new_name",
+ "Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f",
+ }
+ current_payload = {
+ "Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f",
+ "Name": "Fabric-1",
+ "Description": "This is a fabric.",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2
+ }
+ ],
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"
+ }
+ }
+ f_module = self.get_module_mock(params={})
+ payload = self.module.merge_payload(modify_payload, current_payload, f_module)
+ assert payload["Name"] == modify_payload["Name"]
+ assert payload["Id"] == modify_payload["Id"]
+ assert payload["Description"] == current_payload["Description"]
+ assert payload["FabricDesignMapping"] == current_payload["FabricDesignMapping"]
+ assert payload["FabricDesign"] == current_payload["FabricDesign"]
+
+ def test_merge_payload_case_02(self):
+ modify_payload = {
+ "Name": "new_name",
+ "Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ }],
+ "FabricDesign": {
+ "Name": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis"
+ }
+ }
+ current_payload = {
+ "Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f",
+ "Name": "Fabric-1",
+ "Description": "This is a fabric.",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2
+ }
+ ],
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"
+ }
+ }
+ f_module = self.get_module_mock(params={})
+ payload = self.module.merge_payload(modify_payload, current_payload, f_module)
+ assert payload["Name"] == modify_payload["Name"]
+ assert payload["Id"] == modify_payload["Id"]
+ assert payload["Description"] == current_payload["Description"]
+ assert payload["FabricDesign"] == modify_payload["FabricDesign"]
+ assert payload["FabricDesignMapping"] == [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2
+ }
+ ]
+
+ def test_merge_payload_case_03(self):
+ modify_payload = {
+ "Name": "new_name",
+ "Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f",
+ "FabricDesign": {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"
+ }
+ }
+ current_payload = {
+ "Id": "8f25f714-9ea8-48e9-8eac-162d5d842e9f",
+ "Name": "Fabric-1",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2
+ }
+ ],
+ "Description": "This is a fabric."
+ }
+ f_module = self.get_module_mock(params={})
+ payload = self.module.merge_payload(modify_payload, current_payload, f_module)
+ assert payload["Name"] == modify_payload["Name"]
+ assert payload["Id"] == modify_payload["Id"]
+ assert payload["Description"] == current_payload["Description"]
+ assert payload["FabricDesign"] == modify_payload["FabricDesign"]
+ assert payload["FabricDesignMapping"] == current_payload["FabricDesignMapping"]
+
+ def test_get_fabric_design(self, ome_connection_mock_for_smart_fabric, ome_response_mock):
+ resp_data = {
+ "Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"
+ }
+ ome_response_mock.json_data = resp_data
+ fabric_design_uri = "/api/NetworkService/Fabrics('0bebadec-b61b-4b16-b354-5196396a4a18')/FabricDesign"
+ fabric_design = self.module.get_fabric_design(fabric_design_uri, ome_connection_mock_for_smart_fabric)
+ assert fabric_design == {"Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}
+
+ def test_get_current_payload(self, mocker, ome_connection_mock_for_smart_fabric):
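+ """get_current_payload should drop NA attributes and resolve FabricDesign through get_fabric_design."""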
+ fabric_details = {
+ "Id": "1312cceb-c3dd-4348-95c1-d8541a17d776",
+ "Name": "Fabric_",
+ "Description": "create new fabric1",
+ "OverrideLLDPConfiguration": "NA",
+ "ScaleVLANProfile": "NA",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "2HB7NX2"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "2HBFNX2"
+ }
+ ],
+ "FabricDesign": {
+ "@odata.id": "/api/NetworkService/Fabrics('1312cceb-c3dd-4348-95c1-d8541a17d776')/FabricDesign"
+ }
+ }
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_fabric_design',
+ return_value={"Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"})
+ payload = self.module.get_current_payload(fabric_details, ome_connection_mock_for_smart_fabric)
+ assert payload == {
+ "Id": "1312cceb-c3dd-4348-95c1-d8541a17d776",
+ "Name": "Fabric_",
+ "Description": "create new fabric1",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "2HB7NX2"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "2HBFNX2"
+ }
+ ],
+ "FabricDesign": {"Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}
+ }
+
+ def test_get_current_payload_case02(self, mocker, ome_connection_mock_for_smart_fabric):
+ fabric_details = {
+ "Id": "1312cceb-c3dd-4348-95c1-d8541a17d776",
+ "Name": "Fabric_",
+ "Description": "create new fabric1",
+ "OverrideLLDPConfiguration": "Disabled",
+ "ScaleVLANProfile": "NA",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "2HB7NX2"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "2HBFNX2"
+ }
+ ],
+ "FabricDesign": {
+ "@odata.id": "/api/NetworkService/Fabrics('1312cceb-c3dd-4348-95c1-d8541a17d776')/FabricDesign"
+ }
+ }
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_fabric_design',
+ return_value={"Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"})
+ payload = self.module.get_current_payload(fabric_details, ome_connection_mock_for_smart_fabric)
+ assert payload == {
+ "Id": "1312cceb-c3dd-4348-95c1-d8541a17d776",
+ "OverrideLLDPConfiguration": "Disabled",
+ "Name": "Fabric_",
+ "Description": "create new fabric1",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "2HB7NX2"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "2HBFNX2"
+ }
+ ],
+ "FabricDesign": {"Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}
+ }
+
+ @pytest.mark.parametrize("params, expected", [({"name": "fabric1"}, {"Name": "fabric1"}),
+ ({"name": "fabric1", "description": "fabric desc"},
+ {"Name": "fabric1", "Description": "fabric desc"}),
+ ({"name": "fabric1", "description": "fabric desc",
+ "override_LLDP_configuration": "Enabled"},
+ {"Name": "fabric1", "Description": "fabric desc",
+ "OverrideLLDPConfiguration": "Enabled"}
+ )])
+ def test_create_modify_payload_case_01(self, params, expected, ome_default_args):
+ ome_default_args.update(params)
+ payload = self.module.create_modify_payload(ome_default_args, None, "1.1")
+ assert payload == expected
+
+ def test_create_modify_payload_case_02(self, ome_default_args):
+ params = {"name": "fabric1", "new_name": "fabric2", "primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX5108n_Ethernet_Switches_in_same_chassis",
+ "override_LLDP_configuration": "Disabled"}
+ ome_default_args.update(params)
+ payload = self.module.create_modify_payload(ome_default_args, "1312cceb-c3dd-4348-95c1-d8541a17d776", "1.0")
+ assert payload["FabricDesignMapping"] == [{"DesignNode": "Switch-A",
+ "PhysicalNode": Constants.service_tag1},
+ {"DesignNode": "Switch-B",
+ "PhysicalNode": Constants.service_tag2}
+ ]
+ assert payload["Name"] == "fabric2"
+ assert "OverrideLLDPConfiguration" not in payload
+ assert payload["FabricDesign"] == {"Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}
+ assert payload["Id"] == "1312cceb-c3dd-4348-95c1-d8541a17d776"
+
+ def test_get_fabric_id_case_01(self):
+ fabric_id, fabric_id_details = self.module.get_fabric_id_details("Fabric_1", all_fabric_details)
+ assert fabric_id == "1312cceb-c3dd-4348-95c1-d8541a17d776"
+ assert fabric_id_details == all_fabric_details[0]
+
+ def test_get_fabric_id_case_02(self):
+ fabric_id, fabric_id_details = self.module.get_fabric_id_details("Fabric_New", all_fabric_details)
+ assert fabric_id is None
+ assert fabric_id_details is None
+
+ def test_get_fabric_id_case_03(self):
+ fabric_id, fabric_id_details = self.module.get_fabric_id_details("Fabric_1", [])
+ assert fabric_id is None
+ assert fabric_id_details is None
+
+ @pytest.mark.parametrize("identifier, expected_type", [("primary_switch_service_tag", "NETWORK_IOM"),
+ ("secondary_switch_service_tag", "NETWORK_IOM"),
+ ("hostname", "CHASSIS")])
+ def test_validate_device_type_case_01(self, ome_default_args, identifier, expected_type):
+ ome_default_args.update({"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2})
+ f_module = self.get_module_mock(params={identifier: "val"})
+ with pytest.raises(Exception, match=DEVICE_SERVICE_TAG_TYPE_ERROR_MSG.format(identifier, expected_type)):
+ self.module.validate_device_type("SERVER", identifier, {}, f_module)
+
+ @pytest.mark.parametrize("identifier", ["primary_switch_service_tag", "secondary_switch_service_tag"])
+ def test_validate_device_type_case_02(self, ome_default_args, identifier):
+ ome_default_args.update({"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX5108n_Ethernet_Switches_in_same_chassis"
+ })
+
+ f_module = self.get_module_mock(params=ome_default_args)
+ with pytest.raises(Exception, match=DESIGN_MODEL_ERROR_MSG.format(identifier, 'MX5108n')):
+ self.module.validate_device_type("NETWORK_IOM", identifier, device_details, f_module)
+
+ @pytest.mark.parametrize("identifier", ["primary_switch_service_tag", "secondary_switch_service_tag"])
+ def test_validate_device_type_case_03(self, ome_default_args, identifier):
+ ome_default_args.update({"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis"
+ })
+
+ f_module = self.get_module_mock(params=ome_default_args)
+ self.module.validate_device_type("NETWORK_IOM", identifier, device_details, f_module)
+
+ def test_validate_service_tag_case_01(self, mocker, ome_connection_mock_for_smart_fabric, ome_default_args):
+ ome_default_args.update({"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis"
+ })
+
+ f_module = self.get_module_mock(params=ome_default_args)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.validate_device_type', return_value=None)
+ ome_connection_mock_for_smart_fabric.get_device_id_from_service_tag.return_value = {"value": device_details,
+ "Id": Constants.device_id1}
+ self.module.validate_service_tag(Constants.service_tag1, "primary_switch_service_tag",
+ {2000: "CHASSIS", 4000: "NETWORK_IOM",
+ 1000: "SERVER",
+ 3000: "STORAGE"}, ome_connection_mock_for_smart_fabric, f_module)
+
+ def test_validate_service_tag_exception_case_01(self, mocker, ome_connection_mock_for_smart_fabric,
+ ome_default_args):
+ ome_default_args.update({"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis"
+ })
+
+ f_module = self.get_module_mock(params=ome_default_args)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.validate_device_type', return_value=None)
+ ome_connection_mock_for_smart_fabric.get_device_id_from_service_tag.return_value = {"value": {}, "Id": None}
+ with pytest.raises(Exception, match=DEVICE_SERVICE_TAG_NOT_FOUND_ERROR_MSG.format(Constants.service_tag1)):
+ self.module.validate_service_tag(Constants.service_tag1, "primary_switch_service_tag",
+ {2000: "CHASSIS", 4000: "NETWORK_IOM",
+ 1000: "SERVER",
+ 3000: "STORAGE"}, ome_connection_mock_for_smart_fabric, f_module)
+
+ @pytest.mark.parametrize("params", [{"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis"
+ },
+ {"primary_switch_service_tag": None,
+ "secondary_switch_service_tag": None,
+ }
+ ])
+ def test_validate_devices_case_01(self, params, mocker, ome_connection_mock_for_smart_fabric, ome_default_args):
+ ome_default_args.update(params)
+
+ f_module = self.get_module_mock(params=ome_default_args)
+ ome_connection_mock_for_smart_fabric.get_device_type.return_value = {2000: "CHASSIS", 4000: "NETWORK_IOM",
+ 1000: "SERVER",
+ 3000: "STORAGE"}
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.validate_service_tag', return_value=None)
+ self.module.validate_devices(Constants.service_tag1, ome_connection_mock_for_smart_fabric, f_module)
+
+ def test_validate_devices_case_02(self, mocker, ome_connection_mock_for_smart_fabric, ome_default_args):
+ ome_default_args.update({"primary_switch_service_tag": Constants.service_tag2,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis"
+ })
+
+ f_module = self.get_module_mock(params=ome_default_args)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.validate_service_tag', return_value=None)
+ ome_connection_mock_for_smart_fabric.get_device_type.return_value = {2000: "CHASSIS",
+ 4000: "NETWORK_IOM",
+ 1000: "SERVER",
+ 3000: "STORAGE"}
+ self.module.validate_devices(Constants.service_tag1, ome_connection_mock_for_smart_fabric, f_module)
+
+ def test_required_field_check_for_create_case_01(self, ome_default_args):
+ ome_default_args.update({"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis",
+ "state": "present"
+ })
+
+ f_module = self.get_module_mock(params=ome_default_args)
+ self.module.required_field_check_for_create("fabric_id", f_module)
+
+ def test_required_field_check_for_create_case_02(self, ome_default_args):
+ ome_default_args.update({"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis",
+ "state": "present"
+ })
+
+ f_module = self.get_module_mock(params=ome_default_args)
+ self.module.required_field_check_for_create(None, f_module)
+
+ @pytest.mark.parametrize("params", [{"primary_switch_service_tag": Constants.service_tag1},
+ {"secondary_switch_service_tag": Constants.service_tag1},
+ {"fabric_design": Constants.service_tag1},
+ {"fabric_design": Constants.service_tag1,
+ "primary_switch_service_tag": Constants.service_tag1},
+ {"fabric_design": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag1},
+ {"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2},
+ {"primary_switch_service_tag": None,
+ "secondary_switch_service_tag": None},
+ {"primary_switch_service_tag": None,
+ "secondary_switch_service_tag": None}
+ ])
+ def test_required_field_check_for_create_case_03(self, params, ome_default_args):
+ ome_default_args.update(params)
+ f_module = self.get_module_mock(params=ome_default_args)
+ with pytest.raises(Exception, match=REQUIRED_FIELD):
+ self.module.required_field_check_for_create(None, f_module)
+
+ def test_process_output_case01(self, ome_connection_mock_for_smart_fabric, ome_default_args):
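+ """process_output is expected to exit through the module; the fake module surfaces that exit as an exception whose kwargs include fabric_id."""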
+ ome_default_args.update({"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis",
+ "state": "present"
+ })
+ f_module = self.get_module_mock(params=ome_default_args)
+ with pytest.raises(Exception, match="Fabric modification operation is initiated.") as err:
+ self.module.process_output("Fabric1", True, "Fabric modification operation is initiated.", "1234",
+ ome_connection_mock_for_smart_fabric, f_module)
+ assert err.value.fail_kwargs['fabric_id'] == "1234"
+
+ def test_process_output_case02(self, mocker, ome_connection_mock_for_smart_fabric, ome_default_args):
+ ome_default_args.update({"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis",
+ "state": "present"
+ })
+ f_module = self.get_module_mock(params=ome_default_args)
+ resp = {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo":
+ [
+ {
+ "MessageId": "CDEV7154",
+ "RelatedProperties": [],
+ "Message": "Fabric update is successful. The OverrideLLDPConfiguration attribute is not"
+ " provided "
+ " in the payload, so it preserves the previous value.",
+ "MessageArgs": [],
+ "Severity": "Informational",
+ "Resolution": "Please update the Fabric with the OverrideLLDPConfiguration as Disabled or"
+ " Enabled "
+ " if necessary. "
+ }
+ ]
+ }
+ }
+ ome_connection_mock_for_smart_fabric.get_all_items_with_pagination.return_value = {"value": all_fabric_details,
+ "total_count": 2}
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_fabric_id_details',
+ return_value=(all_fabric_details[0]["Id"], all_fabric_details[0]))
+ with pytest.raises(Exception, match="Fabric creation operation is initiated.") as err:
+ self.module.process_output("Fabric1", resp, "Fabric creation operation is initiated.", None,
+ ome_connection_mock_for_smart_fabric, f_module)
+ assert err.value.fail_kwargs['fabric_id'] == all_fabric_details[0]["Id"]
+ assert err.value.fail_kwargs['additional_info'] == resp
+
+ def test_process_output_case03(self, ome_connection_mock_for_smart_fabric, ome_default_args):
+ ome_default_args.update({"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis",
+ "state": "present"
+ })
+ f_module = self.get_module_mock(params=ome_default_args)
+ with pytest.raises(Exception, match="Fabric creation operation is initiated.") as err:
+ self.module.process_output("Fabric1", "1234", "Fabric creation operation is initiated.", None,
+ ome_connection_mock_for_smart_fabric, f_module)
+ assert err.value.fail_kwargs['fabric_id'] == "1234"
+
+ def test_create_modify_fabric_modify_case_01(self, ome_connection_mock_for_smart_fabric, ome_default_args, mocker,
+ ome_response_mock):
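+ """Modify path: with the helper functions mocked, an existing fabric id should route through merge_payload and process_output."""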
+ ome_default_args.update({"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis",
+ "state": "present"
+ })
+
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.required_field_check_for_create',
+ return_value=None)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_msm_device_details',
+ return_value=(Constants.service_tag1, "1.1"))
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.validate_devices', return_value=None)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.validate_modify', return_value=None)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_fabric_id_details',
+ return_value=(all_fabric_details[0]["Id"], all_fabric_details[0]))
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.create_modify_payload',
+ return_value={"Name": "fabric2", "Description": "fabric desc2",
+ "OverrideLLDPConfiguration": "Enabled"})
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_current_payload',
+ return_value={
+ "Name": "fabric1",
+ "Description": "fabric desc1",
+ "OverrideLLDPConfiguration": "Enabled",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "3QM4WV2"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "GTCT8T2"
+ }
+ ],
+ "FabricDesign": {
+ "Name": "2xMX9116n_Fabric_Switching_Engines_in_different_chassis"
+ }
+ })
+ mocker_merge_payload = mocker.patch(MODULE_PATH + 'ome_smart_fabric.merge_payload',
+ return_value={
+ "Name": "fabric2",
+ "Description": "fabric desc2",
+ "OverrideLLDPConfiguration": "Enabled",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "3QM4WV2"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "GTCT8T2"
+ }
+ ],
+ "FabricDesign": {
+ "Name": "2xMX9116n_Fabric_Switching_Engines_in_different_chassis"
+ }
+ })
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.idempotency_check_for_state_present', return_value=None)
+ mocker_process_output = mocker.patch(MODULE_PATH + 'ome_smart_fabric.process_output', return_value=None)
+ ome_response_mock.json_data = "true"
+ f_module = self.get_module_mock(params=ome_default_args)
+ self.module.create_modify_fabric("Fabric1", all_fabric_details, ome_connection_mock_for_smart_fabric,
+ f_module)
+ assert mocker_process_output.called
+ assert mocker_merge_payload.called
+
+ def test_create_modify_fabric_create_case_02(self, ome_connection_mock_for_smart_fabric, ome_default_args, mocker,
+ ome_response_mock):
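+ """Create path: when no existing fabric id is found, create_modify_payload and process_output should be invoked."""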
+ ome_default_args.update({"primary_switch_service_tag": Constants.service_tag1,
+ "secondary_switch_service_tag": Constants.service_tag2,
+ "fabric_design": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis",
+ "state": "present"
+ })
+
+ f_module = self.get_module_mock(params=ome_default_args)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.required_field_check_for_create',
+ return_value=None)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_msm_device_details',
+ return_value=(Constants.service_tag1, "1.1"))
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.validate_devices', return_value=None)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_fabric_id_details',
+ return_value=(None, {}))
+ mocker_create_modify_payload = mocker.patch(MODULE_PATH + 'ome_smart_fabric.create_modify_payload',
+ return_value={"Name": "fabric2", "Description": "fabric desc2",
+ "OverrideLLDPConfiguration": "Enabled"})
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.idempotency_check_for_state_present', return_value=None)
+ ome_response_mock.json_data = "123456789abcd"
+ mocker_process_output = mocker.patch(MODULE_PATH + 'ome_smart_fabric.process_output', return_value=None)
+ self.module.create_modify_fabric("Fabric1", all_fabric_details, ome_connection_mock_for_smart_fabric,
+ f_module)
+ assert mocker_process_output.called
+ assert mocker_create_modify_payload.called
+
+ def test_check_fabric_exits_for_state_absent_non_check_mode_case01(self, mocker,
+ ome_connection_mock_for_smart_fabric,
+ ome_default_args):
+ ome_default_args.update({
+ "state": "absent",
+ "name": "Fabric1"
+ })
+
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=False)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_fabric_id_details',
+ return_value=(None, {}))
+ with pytest.raises(Exception, match=FABRIC_NOT_FOUND_ERROR_MSG.format("Fabric1")):
+ self.module.check_fabric_exits_for_state_absent(all_fabric_details[0], f_module, "Fabric1")
+
+ def test_check_fabric_exits_for_state_absent_non_check_mode_case02(self, mocker,
+ ome_connection_mock_for_smart_fabric,
+ ome_default_args):
+ ome_default_args.update({
+ "state": "absent",
+ "name": "Fabric1"
+ })
+
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=False)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_fabric_id_details',
+ return_value=(all_fabric_details[0]["Id"], all_fabric_details[0]))
+ fabric_id = self.module.check_fabric_exits_for_state_absent(all_fabric_details[0], f_module, "Fabric1")
+ assert fabric_id == all_fabric_details[0]["Id"]
+
+ def test_check_fabric_exits_for_state_absent_check_mode_case01(self, mocker,
+ ome_connection_mock_for_smart_fabric,
+ ome_default_args):
+ ome_default_args.update({
+ "state": "absent",
+ "name": "Fabric1"
+ })
+
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_fabric_id_details',
+ return_value=(None, {}))
+ with pytest.raises(Exception, match=CHECK_MODE_CHANGE_NOT_FOUND_MSG):
+ self.module.check_fabric_exits_for_state_absent(all_fabric_details[0], f_module, "Fabric1")
+
+ def test_check_fabric_exits_for_state_absent_check_mode_case02(self, mocker,
+ ome_connection_mock_for_smart_fabric,
+ ome_default_args):
+ ome_default_args.update({
+ "state": "absent",
+ "name": "Fabric1"
+ })
+
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.get_fabric_id_details',
+ return_value=(all_fabric_details[0]["Id"], all_fabric_details[0]))
+ with pytest.raises(Exception, match=CHECK_MODE_CHANGE_FOUND_MSG):
+ self.module.check_fabric_exits_for_state_absent(all_fabric_details[0], f_module, "Fabric1")
+
+ def test_delete_fabric(self, ome_connection_mock_for_smart_fabric, ome_default_args, mocker):
+ ome_default_args.update({
+ "state": "absent",
+ "name": "Fabric1"
+ })
+
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ mocker.patch(MODULE_PATH + 'ome_smart_fabric.check_fabric_exits_for_state_absent',
+ return_value=all_fabric_details[0]["Id"])
+ with pytest.raises(Exception, match="Fabric deletion operation is initiated.") as err:
+ self.module.delete_fabric(all_fabric_details, ome_connection_mock_for_smart_fabric, f_module, "Fabric1")
+ assert err.value.fail_kwargs['fabric_id'] == all_fabric_details[0]["Id"]
+
+ def test_fabric_actions_case_01(self, mocker, ome_connection_mock_for_smart_fabric, ome_default_args):
+ ome_default_args.update({
+ "state": "absent",
+ "name": "Fabric1"
+ })
+ ome_connection_mock_for_smart_fabric.get_all_items_with_pagination.return_value = {"value": all_fabric_details,
+ "total_count": 2}
+ delete_fabric = mocker.patch(MODULE_PATH + 'ome_smart_fabric.delete_fabric',
+ return_value=None)
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ self.module.fabric_actions(ome_connection_mock_for_smart_fabric, f_module)
+ assert delete_fabric.called
+
+ def test_fabric_actions_case_02(self, mocker, ome_connection_mock_for_smart_fabric, ome_default_args):
+ ome_default_args.update({
+ "state": "present",
+ "name": "Fabric1"
+ })
+ ome_connection_mock_for_smart_fabric.get_all_items_with_pagination.return_value = {"value": all_fabric_details,
+ "total_count": 2}
+ create_modify_fabric = mocker.patch(MODULE_PATH + 'ome_smart_fabric.create_modify_fabric',
+ return_value=None)
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ self.module.fabric_actions(ome_connection_mock_for_smart_fabric, f_module)
+ assert create_modify_fabric.called
+
+ def test_get_service_tag_with_fqdn_success_case(self, ome_default_args, ome_connection_mock_for_smart_fabric):
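+ """Return the service tag of the device whose DnsName matches the hostname supplied to the module."""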
+ ome_default_args.update({"hostname": "M-YYYY.abcd.lab"})
+ resp_data = {
+ "@odata.context": "/api/$metadata#Collection(DeviceService.Device)",
+ "@odata.count": 2,
+ "value": [
+ {
+ "@odata.type": "#DeviceService.Device",
+ "@odata.id": "/api/DeviceService/Devices(Constants.device_id1)",
+ "Id": Constants.device_id1,
+ "Type": 2000,
+ "Identifier": Constants.service_tag1,
+ "DeviceServiceTag": Constants.service_tag1,
+ "ChassisServiceTag": None,
+ "Model": "PowerEdge MX7000",
+ "PowerState": 17,
+ "ManagedState": 3000,
+ "Status": 4000,
+ "ConnectionState": True,
+ "AssetTag": None,
+ "SystemId": 2031,
+ "DeviceName": "MX-Constants.service_tag1",
+ "LastInventoryTime": "2020-07-11 17:00:18.925",
+ "LastStatusTime": "2020-07-11 09:00:07.444",
+ "DeviceSubscription": None,
+ "DeviceCapabilities": [
+ 18,
+ 8,
+ 201,
+ 202
+ ],
+ "SlotConfiguration": {
+ "ChassisName": None
+ },
+ "DeviceManagement": [
+ {
+ "ManagementId": 111111,
+ "NetworkAddress": ome_default_args["hostname"],
+ "MacAddress": "xx:yy:zz:x1x1",
+ "ManagementType": 2,
+ "InstrumentationName": "MX-Constants.service_tag1",
+ "DnsName": "M-YYYY.abcd.lab",
+ "ManagementProfile": [
+ {
+ "ManagementProfileId": 111111,
+ "ProfileId": "MSM_BASE",
+ "ManagementId": 111111,
+ "ManagementURL": "https://" + ome_default_args["hostname"] + ":443",
+ "HasCreds": 0,
+ "Status": 1000,
+ "StatusDateTime": "2020-07-11 17:00:18.925"
+ }
+ ]
+ },
+ {
+ "ManagementId": 33333,
+ "NetworkAddress": "[1234.abcd:5678:345]",
+ "MacAddress": "22:xx:yy:11",
+ "ManagementType": 2,
+ "InstrumentationName": "MX-Constants.service_tag1",
+ "DnsName": "M-YYYY.abcd.lab",
+ "ManagementProfile": [
+ {
+ "ManagementProfileId": 33333,
+ "ProfileId": "MSM_BASE",
+ "ManagementId": 33333,
+ "ManagementURL": "https://[1234:abcd:567:xyzs]:443",
+ "HasCreds": 0,
+ "Status": 1000,
+ "StatusDateTime": "2020-07-11 17:00:18.925"
+ }
+ ]
+ }
+ ],
+ "Actions": None
+ },
+ {
+ "@odata.type": "#DeviceService.Device",
+ "@odata.id": "/api/DeviceService/Devices(Constants.device_id1)",
+ "Id": Constants.device_id1,
+ "Type": 2000,
+ "Identifier": Constants.service_tag2,
+ "DeviceServiceTag": Constants.service_tag2,
+ "ChassisServiceTag": None,
+ "Model": "PowerEdge MX7000",
+ "PowerState": 17,
+ "ManagedState": 3000,
+ "Status": 4000,
+ "ConnectionState": True,
+ "AssetTag": None,
+ "SystemId": 2031,
+ "DeviceName": "MX-Constants.service_tag2",
+ "LastInventoryTime": "2020-07-11 17:00:18.925",
+ "LastStatusTime": "2020-07-11 09:00:07.444",
+ "DeviceSubscription": None,
+ "DeviceCapabilities": [
+ 18,
+ 8,
+ 201,
+ 202
+ ],
+ "SlotConfiguration": {
+ "ChassisName": None
+ },
+ "DeviceManagement": [
+ {
+ "ManagementId": 111111,
+ "NetworkAddress": ome_default_args["hostname"],
+ "MacAddress": "xx:yy:zz:x1x1",
+ "ManagementType": 2,
+ "InstrumentationName": "MX-Constants.service_tag2",
+ "DnsName": "M-XXXX.abcd.lab",
+ "ManagementProfile": [
+ {
+ "ManagementProfileId": 111111,
+ "ProfileId": "MSM_BASE",
+ "ManagementId": 111111,
+ "ManagementURL": "https://" + ome_default_args["hostname"] + ":443",
+ "HasCreds": 0,
+ "Status": 1000,
+ "StatusDateTime": "2020-07-11 17:00:18.925"
+ }
+ ]
+ },
+ {
+ "ManagementId": 22222,
+ "NetworkAddress": "[1234.abcd:5678:345]",
+ "MacAddress": "22:xx:yy:11",
+ "ManagementType": 2,
+ "InstrumentationName": "MX-Constants.service_tag2",
+ "DnsName": "M-XXXX.abcd.lab",
+ "ManagementProfile": [{
+ "ManagementProfileId": 22222,
+ "ProfileId": "MSM_BASE",
+ "ManagementId": 22222,
+ "ManagementURL": "https://[1234:abcd:567:xyzs]:443",
+ "HasCreds": 0,
+ "Status": 1000,
+ "StatusDateTime": "2020-07-11 17:00:18.925"
+ }]
+ }
+ ],
+ "Actions": None
+ }
+ ]
+ }
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ ome_connection_mock_for_smart_fabric.get_all_items_with_pagination.return_value = resp_data
+ service_tag = self.module.get_service_tag_with_fqdn(ome_connection_mock_for_smart_fabric, f_module)
+ assert service_tag == Constants.service_tag1
+
+ def test_get_service_tag_with_fqdn_success_case2(self, ome_default_args, ome_connection_mock_for_smart_fabric):
+ ome_default_args.update({"hostname": Constants.hostname1})
+ resp_data = {
+ "@odata.context": "/api/$metadata#Collection(DeviceService.Device)",
+ "@odata.count": 2,
+ "value": [
+ {
+ "@odata.type": "#DeviceService.Device",
+ "@odata.id": "/api/DeviceService/Devices(Constants.device_id1)",
+ "Id": Constants.device_id1,
+ "Type": 2000,
+ "Identifier": Constants.service_tag1,
+ "DeviceServiceTag": Constants.service_tag1,
+ "ChassisServiceTag": None,
+ "Model": "PowerEdge MX7000",
+ "PowerState": 17,
+ "ManagedState": 3000,
+ "Status": 4000,
+ "ConnectionState": True,
+ "AssetTag": None,
+ "SystemId": 2031,
+ "DeviceName": "MX-Constants.service_tag1",
+ "LastInventoryTime": "2020-07-11 17:00:18.925",
+ "LastStatusTime": "2020-07-11 09:00:07.444",
+ "DeviceSubscription": None,
+ "DeviceCapabilities": [
+ 18,
+ 8,
+ 201,
+ 202
+ ],
+ "SlotConfiguration": {
+ "ChassisName": None
+ },
+ "DeviceManagement": [
+ {
+ "ManagementId": 111111,
+ "NetworkAddress": "192.168.1.1",
+ "MacAddress": "xx:yy:zz:x1x1",
+ "ManagementType": 2,
+ "InstrumentationName": "MX-Constants.service_tag1",
+ "DnsName": "M-YYYY.abcd.lab",
+ "ManagementProfile": [
+ {
+ "ManagementProfileId": 111111,
+ "ProfileId": "MSM_BASE",
+ "ManagementId": 111111,
+ "ManagementURL": "https://" + ome_default_args["hostname"] + ":443",
+ "HasCreds": 0,
+ "Status": 1000,
+ "StatusDateTime": "2020-07-11 17:00:18.925"
+ }
+ ]
+ },
+ {
+ "ManagementId": 33333,
+ "NetworkAddress": "[1234.abcd:5678:345]",
+ "MacAddress": "22:xx:yy:11",
+ "ManagementType": 2,
+ "InstrumentationName": "MX-Constants.service_tag1",
+ "DnsName": "M-YYYY.abcd.lab",
+ "ManagementProfile": [
+ {
+ "ManagementProfileId": 33333,
+ "ProfileId": "MSM_BASE",
+ "ManagementId": 33333,
+ "ManagementURL": "https://[1234:abcd:567:xyzs]:443",
+ "HasCreds": 0,
+ "Status": 1000,
+ "StatusDateTime": "2020-07-11 17:00:18.925"
+ }
+ ]
+ }
+ ],
+ "Actions": None
+ },
+ {
+ "@odata.type": "#DeviceService.Device",
+ "@odata.id": "/api/DeviceService/Devices(Constants.device_id1)",
+ "Id": Constants.device_id1,
+ "Type": 2000,
+ "Identifier": Constants.service_tag2,
+ "DeviceServiceTag": Constants.service_tag2,
+ "ChassisServiceTag": None,
+ "Model": "PowerEdge MX7000",
+ "PowerState": 17,
+ "ManagedState": 3000,
+ "Status": 4000,
+ "ConnectionState": True,
+ "AssetTag": None,
+ "SystemId": 2031,
+ "DeviceName": "MX-Constants.service_tag2",
+ "LastInventoryTime": "2020-07-11 17:00:18.925",
+ "LastStatusTime": "2020-07-11 09:00:07.444",
+ "DeviceSubscription": None,
+ "DeviceCapabilities": [
+ 18,
+ 8,
+ 201,
+ 202
+ ],
+ "SlotConfiguration": {
+ "ChassisName": None
+ },
+ "DeviceManagement": [
+ {
+ "ManagementId": 111111,
+ "NetworkAddress": "192.168.1.2",
+ "MacAddress": "xx:yy:zz:x1x1",
+ "ManagementType": 2,
+ "InstrumentationName": "MX-Constants.service_tag2",
+ "DnsName": "M-XXXX.abcd.lab",
+ "ManagementProfile": [
+ {
+ "ManagementProfileId": 111111,
+ "ProfileId": "MSM_BASE",
+ "ManagementId": 111111,
+ "ManagementURL": "https://" + ome_default_args["hostname"] + ":443",
+ "HasCreds": 0,
+ "Status": 1000,
+ "StatusDateTime": "2020-07-11 17:00:18.925"
+ }
+ ]
+ },
+ {
+ "ManagementId": 22222,
+ "NetworkAddress": "[1234.abcd:5678:345]",
+ "MacAddress": "22:xx:yy:11",
+ "ManagementType": 2,
+ "InstrumentationName": "MX-Constants.service_tag2",
+ "DnsName": "M-XXXX.abcd.lab",
+ "ManagementProfile": [
+ {
+ "ManagementProfileId": 22222,
+ "ProfileId": "MSM_BASE",
+ "ManagementId": 22222,
+ "ManagementURL": "https://[1234:abcd:567:xyzs]:443",
+ "HasCreds": 0,
+ "Status": 1000,
+ "StatusDateTime": "2020-07-11 17:00:18.925"
+ }
+ ]
+ }
+ ],
+ "Actions": None
+ }
+ ]
+ }
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ ome_connection_mock_for_smart_fabric.get_all_items_with_pagination.return_value = resp_data
+ service_tag = self.module.get_service_tag_with_fqdn(ome_connection_mock_for_smart_fabric, f_module)
+ assert service_tag is None
+
+ def test_get_service_tag_with_fqdn_success_case3(self, ome_default_args, ome_connection_mock_for_smart_fabric):
+ ome_default_args.update({"hostname": Constants.hostname1})
+ resp_data = {"value": []}
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ ome_connection_mock_for_smart_fabric.get_all_items_with_pagination.return_value = resp_data
+ service_tag = self.module.get_service_tag_with_fqdn(ome_connection_mock_for_smart_fabric, f_module)
+ assert service_tag is None
+
+ def test_fabric_validate_modify_case01(self, ome_default_args):
+ ome_default_args.update({"fabric_design": "2xMX5108n_Ethernet_Switches_in_same_chassis"})
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ current_payload = {
+ "Id": "1312cceb-c3dd-4348-95c1-d8541a17d776",
+ "Name": "Fabric_",
+ "Description": "create new fabric1",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "2HB7NX2"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "2HBFNX2"
+ }
+ ],
+ "FabricDesign": {"Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}
+ }
+ self.module.validate_modify(f_module, current_payload)
+
+ def test_fabric_validate_modify_case02(self, ome_default_args):
+ ome_default_args.update({"name": "abc"})
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ current_payload = {
+ "Id": "1312cceb-c3dd-4348-95c1-d8541a17d776",
+ "Name": "Fabric_",
+ "Description": "create new fabric1",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "2HB7NX2"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "2HBFNX2"
+ }
+ ],
+ "FabricDesign": {"Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}
+ }
+ self.module.validate_modify(f_module, current_payload)
+
+ def test_fabric_validate_modify_case03(self, ome_default_args):
+ ome_default_args.update({"fabric_design": "2xMX5108n_Ethernet_Switches_in_same_chassis"})
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ current_payload = {
+ "Id": "1312cceb-c3dd-4348-95c1-d8541a17d776",
+ "Name": "Fabric_",
+ "Description": "create new fabric1",
+ "FabricDesign": {"Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"},
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "2HB7NX2"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "2HBFNX2"
+ }
+ ],
+ }
+ self.module.validate_modify(f_module, current_payload)
+
+ def test_fabric_validate_modify_case05(self, ome_default_args):
+ ome_default_args.update({"primary_switch_service_tag": "abc"})
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ current_payload = {
+ "Id": "1312cceb-c3dd-4348-95c1-d8541a17d776",
+ "Name": "Fabric_",
+ "Description": "create new fabric1",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "2HBFNX2"
+ }
+ ],
+ "FabricDesign": {"Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}
+ }
+ self.module.validate_modify(f_module, current_payload)
+
+ def test_fabric_validate_modify_case07(self, ome_default_args):
+ ome_default_args.update({"name": "abc"})
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ current_payload = {
+ "Id": "1312cceb-c3dd-4348-95c1-d8541a17d776",
+ "Name": "Fabric_",
+ "Description": "create new fabric1",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "2HBFNX2"
+ }
+ ],
+ "FabricDesign": {"Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}
+ }
+ self.module.validate_modify(f_module, current_payload)
+
+ @pytest.mark.parametrize("param", [{"secondary_switch_service_tag": "abc"}, {"primary_switch_service_tag": "abc"}])
+ def test_fabric_validate_modify_case08(self, param, ome_default_args):
+ ome_default_args.update(param)
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ current_payload = {
+ "Id": "1312cceb-c3dd-4348-95c1-d8541a17d776",
+ "Name": "Fabric_",
+ "Description": "create new fabric1",
+ "FabricDesignMapping": [
+ ],
+ "FabricDesign": {"Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}
+ }
+ self.module.validate_modify(f_module, current_payload)
+
+ @pytest.mark.parametrize("param", [{"secondary_switch_service_tag": "abc"}, {"primary_switch_service_tag": "abc"}])
+ def test_fabric_validate_modify_case09(self, param, ome_default_args):
+ ome_default_args.update(param)
+ f_module = self.get_module_mock(params=ome_default_args, check_mode=True)
+ current_payload = {
+ "Id": "1312cceb-c3dd-4348-95c1-d8541a17d776",
+ "Name": "Fabric_",
+ "Description": "create new fabric1",
+ "FabricDesign": {"Name": "2xMX5108n_Ethernet_Switches_in_same_chassis"}
+ }
+ self.module.validate_modify(f_module, current_payload)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_uplink.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_uplink.py
new file mode 100644
index 00000000..6670499e
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_uplink.py
@@ -0,0 +1,386 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.3.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+from ssl import SSLError
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_smart_fabric_uplink
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_smart_fabric_uplink.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_smart_fabric_uplink(mocker, ome_response_mock):
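+ """Patch RestOME inside ome_smart_fabric_uplink and return the mocked connection whose invoke_request yields ome_response_mock."""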
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeSmartFabricUplink(FakeAnsibleModule):
+ module = ome_smart_fabric_uplink
+
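+ # get_item_id returns the Id of the entry whose Name matches when the request succeeds; every other case falls back to 0.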
+ @pytest.mark.parametrize("params",
+ [{"success": True, "json_data": {"value": [{"Name": "vlan_name", "Id": 123}]}, "id": 123},
+ {"success": True, "json_data": {"value": []}, "id": 0},
+ {"success": False, "json_data": {"value": [{"Name": "vlan_name", "Id": 123}]}, "id": 0},
+ {"success": True, "json_data": {"value": [{"Name": "vlan_name1", "Id": 123}]}, "id": 0}])
+ def test_get_item_id(self, params, ome_connection_mock_for_smart_fabric_uplink, ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ item_id, vlans = self.module.get_item_id(ome_connection_mock_for_smart_fabric_uplink, "vlan_name", "uri")
+ assert item_id == params["id"]
+
+ @pytest.mark.parametrize(
+ "params", [{"uplinks": [{"Ports": [1, 2]}, {"Ports": []}], "portlist": [1, 2]},
+ {"uplinks": [{"Ports": [1, 2]}, {"Ports": [3, 4]}, {"Ports": [5, 4]}],
+ "portlist": [1, 2, 3, 4, 5, 4]},
+ {"uplinks": [{"Ports": [1, 2]}, {"Ports": [3, 4]}], "portlist": [1, 2, 3, 4]}, ])
+ def test_get_all_uplink_ports(self, params):
+ portlist = self.module.get_all_uplink_ports(params.get("uplinks"))
+ assert portlist == params.get("portlist")
+
+ @pytest.mark.parametrize("params", [{"inp": {"tagged_networks": ["vlan_name"]}, "success": True,
+ "json_data": {"ApplicableUplinkNetworks": [{"Name": "vlan_name", "Id": 123}]},
+ "payload": [123]}, ])
+ def test_validate_networks(self, params, ome_connection_mock_for_smart_fabric_uplink, ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params.get("inp", {}))
+ vlan_payload = self.module.validate_networks(f_module, ome_connection_mock_for_smart_fabric_uplink, 1, 2)
+ assert vlan_payload == params["payload"]
+
+ @pytest.mark.parametrize("params", [{"inp": {"tagged_networks": ["vlan_name1"]}, "success": True,
+ "json_data": {"ApplicableUplinkNetworks": [{"Name": "vlan_name", "Id": 123}]},
+ "payload": [123],
+ "error_msg": "Networks with names vlan_name1 are not applicable or valid."},
+ {"inp": {"tagged_networks": ["vlan_name1", "vlan_name2"]}, "success": True,
+ "json_data": {"ApplicableUplinkNetworks": [{"Name": "vlan_name", "Id": 123}]},
+ "payload": [123],
+ "error_msg": "Networks with names {0} are not applicable "
+ "or valid.".format(
+ ",".join(set(["vlan_name1", "vlan_name2"])))}, ])
+ def test_validate_networks_failure(self, params, ome_connection_mock_for_smart_fabric_uplink, ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params.get("inp", {}))
+ error_message = params["error_msg"]
+ with pytest.raises(Exception) as err:
+ self.module.validate_networks(f_module, ome_connection_mock_for_smart_fabric_uplink, 1, 2)
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize("params", [
+ {"inp": {"primary_switch_service_tag": "ABC123", "primary_switch_ports": ["ethernet1/1/7", "ethernet1/1/4"]},
+ "success": True, "json_data": {
+ "InventoryInfo": [{"PortNumber": "ethernet1/1/6"}, {"PortNumber": "ethernet1/1/7"},
+ {"PortNumber": "ethernet1/1/4"}]}, "get_item_id": (0, []), "payload": [123],
+ "uplinks": [{"Ports": [{"Id": "ethernet1/1/6"}]}, {"Ports": [{"Id": "ethernet1/1/4"}]}],
+ "error_msg": "Device with service tag ABC123 does not exist."}])
+ def test_validate_ioms_failure(self, mocker, params, ome_connection_mock_for_smart_fabric_uplink,
+ ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + "get_item_id", return_value=(params.get("get_item_id")))
+ f_module = self.get_module_mock(params=params.get("inp", {}))
+ error_message = params["error_msg"]
+ with pytest.raises(Exception) as err:
+ self.module.validate_ioms(f_module, ome_connection_mock_for_smart_fabric_uplink, params.get("uplinks"))
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize("params", [
+ {"inp": {"primary_switch_service_tag": "ABC123", "primary_switch_ports": ["ethernet1/1/7", "ethernet1/1/4"]},
+ "success": True, "json_data": {
+ "InventoryInfo": [{"PortNumber": "ethernet1/1/6"}, {"PortNumber": "ethernet1/1/7"},
+ {"PortNumber": "ethernet1/1/4"}]}, "get_item_id": (2, []), "payload": [123],
+ "uplinks": [{"Ports": [{"Id": "ethernet1/1/6"}]}, {"Ports": [{"Id": "ethernet1/1/4"}]}],
+ "ioms": ['ABC123:ethernet1/1/7', 'ABC123:ethernet1/1/4']}])
+ def test_validate_ioms(self, mocker, params, ome_connection_mock_for_smart_fabric_uplink, ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + "get_item_id", return_value=(params.get("get_item_id")))
+ f_module = self.get_module_mock(params=params.get("inp", {}))
+ ioms = self.module.validate_ioms(f_module, ome_connection_mock_for_smart_fabric_uplink, params.get("uplinks"))
+ assert ioms == params.get("ioms")
+
+ @pytest.mark.parametrize("params", [{"inp": {"untagged_network": "vlan_name1"}, "success": True,
+ "json_data": {
+ "ApplicableUplinkNetworks": [{"Name": "vlan_name", "VlanMaximum": 123}]},
+ "vlan_id": 123,
+ "error_msg": "Native VLAN name vlan_name1 is not applicable or valid."}, ])
+ def test_validate_native_vlan_failure(self, params, ome_connection_mock_for_smart_fabric_uplink, ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params.get("inp", {}))
+ error_message = params["error_msg"]
+ with pytest.raises(Exception) as err:
+ self.module.validate_native_vlan(f_module, ome_connection_mock_for_smart_fabric_uplink, 1, 2)
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize("params", [{"inp": {"untagged_network": "vlan_name"}, "success": True, "json_data": {
+ "ApplicableUplinkNetworks": [{"Name": "vlan_name", "VlanMaximum": 123}]}, "vlan_id": 123}, ])
+ def test_validate_native_vlan_success(self, params, ome_connection_mock_for_smart_fabric_uplink, ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params.get("inp", {}))
+ vlan_payload = self.module.validate_native_vlan(f_module, ome_connection_mock_for_smart_fabric_uplink, 1, 2)
+ assert vlan_payload == params["vlan_id"]
+
+ def test_delete_uplink(self, ome_connection_mock_for_smart_fabric_uplink, ome_response_mock):
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {}
+ f_module = self.get_module_mock(params={"fabric_name": "f1", "name": "uplink1"})
+ with pytest.raises(Exception, match="Successfully deleted the uplink.") as err:
+ self.module.delete_uplink(f_module, ome_connection_mock_for_smart_fabric_uplink, 12, 123)
+
+ @pytest.mark.parametrize("params", [{"inp": {"fabric_name": "f1", "name": "uplink1"},
+ "error_msg": "Mandatory parameter uplink_type not provided for uplink creation."},
+ {"inp": {"fabric_name": "f1", "name": "uplink1", "uplink_type": "Ethernet"},
+ "error_msg": "Mandatory parameter tagged_networks not provided for uplink creation."},
+ {"inp": {"fabric_name": "f1", "name": "uplink1", "uplink_type": "FEthernet",
+ "tagged_networks": ["vlan1"]}, "get_item_id": (0, []),
+ "error_msg": "Uplink Type FEthernet does not exist."}, {
+ "inp": {"fabric_name": "f1", "name": "uplink1", "uplink_type": "Ethernet",
+ "tagged_networks": ["vlan1"]}, "get_item_id": (2, []),
+ "error_msg": "Provide port details."}, {
+ "inp": {"fabric_name": "f1", "name": "uplink1", "uplink_type": "Ethernet",
+ "tagged_networks": ["vlan1"],
+ "primary_switch_service_tag": "ABC123",
+ "secondary_switch_service_tag": "ABC123"}, "get_item_id": (2, []),
+ "error_msg": "Primary and Secondary service tags must not be the same."}, {
+ "inp": {"fabric_name": "f1", "name": "uplink1", "uplink_type": "Ethernet",
+ "tagged_networks": ["vlan1"],
+ "primary_switch_service_tag": "ABC123",
+ "secondary_switch_service_tag": "XYZ123"}, "get_item_id": (2, []),
+ "validate_ioms": ["ST1:123", "ST2:345"], "validate_networks": [1, 2],
+ "check_mode": True, "error_msg": "Changes found to be applied."}, {
+ "inp": {"fabric_name": "f1", "name": "uplink1", "uplink_type": "Ethernet",
+ "tagged_networks": ["vlan1"],
+ "primary_switch_service_tag": "ABC123",
+ "secondary_switch_service_tag": "XYZ123"}, "get_item_id": (2, []),
+ "validate_ioms": ["ST1:123", "ST2:345"], "validate_networks": [1, 2],
+ "error_msg": "Successfully created the uplink."}, {
+ "inp": {"fabric_name": "f1", "name": "uplink1", "uplink_type": "Ethernet",
+ "tagged_networks": ["vlan1"],
+ "primary_switch_service_tag": "ABC123",
+ "secondary_switch_service_tag": "XYZ123", "ufd_enable": "Enabled",
+ "description": "uplink description", "untagged_network": "vlan2"},
+ "get_item_id": (2, []), "validate_ioms": ["ST1:123", "ST2:345"],
+ "validate_networks": [1, 2], "validate_native_vlan": 1,
+ "error_msg": "Successfully created the uplink."}, ])
+ def test_create_uplink(self, mocker, params, ome_connection_mock_for_smart_fabric_uplink, ome_response_mock):
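+ """Each case asserts the message raised by create_uplink, covering missing-parameter failures as well as check-mode and successful creation exits."""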
+ f_module = self.get_module_mock(params=params.get("inp", {}), check_mode=params.get("check_mode", False))
+ mocker.patch(MODULE_PATH + "get_item_id", return_value=(params.get("get_item_id")))
+ mocker.patch(MODULE_PATH + "validate_ioms", return_value=(params.get("validate_ioms")))
+ mocker.patch(MODULE_PATH + "validate_networks", return_value=(params.get("validate_networks")))
+ mocker.patch(MODULE_PATH + "validate_native_vlan", return_value=(params.get("validate_native_vlan")))
+ error_message = params["error_msg"]
+ with pytest.raises(Exception) as err:
+ self.module.create_uplink(f_module, ome_connection_mock_for_smart_fabric_uplink, params.get("fabric_id", 0),
+ [])
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize(
+ "params", [{"inp": {"fabric_name": "f1", "name": "uplink1", "new_name": "uplink2",
+ "description": "modified from OMAM", "uplink_type": "Ethernet",
+ "ufd_enable": "Enabled", "untagged_network": "vlan2"},
+ "uplink_id": {"Id": "9cf5a5ee-aecc-45d1-a113-5c4055ab3b4c", "Name": "create1",
+ "Description": "CREATED from OMAM",
+ "MediaType": "Ethernet", "NativeVLAN": 0, "UfdEnable": "NA",
+ "Ports": [{"Id": "2HBFNX2:ethernet1/1/14"}, {"Id": "2HB7NX2:ethernet1/1/13"}],
+ "Networks": [{"Id": 36011}]},
+ "uplinks": [],
+ "get_item_id": (2, []), "validate_ioms": ["ST1:123", "ST2:345"],
+ "validate_networks": [1, 2], "validate_native_vlan": 1,
+ "error_msg": "Successfully modified the uplink."},
+ {"inp": {"fabric_name": "f1", "name": "uplink1", "new_name": "uplink2",
+ "description": "modified from OMAM", "uplink_type": "Ethernet",
+ "ufd_enable": "Enabled", "untagged_network": "vlan2"},
+ "uplink_id": {"Id": "9cf5a5ee-aecc-45d1-a113-5c4055ab3b4c", "Name": "create1",
+ "Description": "CREATED from OMAM", "MediaType": "Ethernet", "NativeVLAN": 0,
+ "UfdEnable": "NA",
+ "Ports": [{"Id": "2HBFNX2:ethernet1/1/14"}, {"Id": "2HB7NX2:ethernet1/1/13"}],
+ "Networks": [{"Id": 36011}]},
+ "uplinks": [], "get_item_id": (2, []),
+ "validate_ioms": ["ST1:123", "ST2:345"], "validate_networks": [1, 2], "validate_native_vlan": 1,
+ "check_mode": True, "error_msg": "Changes found to be applied."},
+ {"inp": {"fabric_name": "f1", "name": "uplink1", "new_name": "uplink2",
+ "uplink_type": "FEthernet"},
+ "uplink_id": {"Id": "9cf5a5ee-aecc-45d1-a113-5c4055ab3b4c", "Name": "create1",
+ "Description": "CREATED from OMAM",
+ "MediaType": "Ethernet", "NativeVLAN": 0, "UfdEnable": "NA",
+ "Ports": [{"Id": "2HBFNX2:ethernet1/1/14"}, {"Id": "2HB7NX2:ethernet1/1/13"}],
+ "Networks": [{"Id": 36011}]},
+ "uplinks": [], "get_item_id": (2, []),
+ "validate_ioms": ["ST1:123", "ST2:345"], "validate_networks": [1, 2],
+ "validate_native_vlan": 1, "error_msg": "Uplink Type cannot be modified."}, ])
+ def test_modify_uplink(self, mocker, params, ome_connection_mock_for_smart_fabric_uplink, ome_response_mock):
+ f_module = self.get_module_mock(params=params.get("inp", {}), check_mode=params.get("check_mode", False))
+ mocker.patch(MODULE_PATH + "get_item_id", return_value=(params.get("get_item_id")))
+ mocker.patch(MODULE_PATH + "validate_ioms", return_value=(params.get("validate_ioms")))
+ mocker.patch(MODULE_PATH + "validate_networks", return_value=(params.get("validate_networks")))
+ mocker.patch(MODULE_PATH + "validate_native_vlan", return_value=(params.get("validate_native_vlan")))
+ error_message = params["error_msg"]
+ with pytest.raises(Exception) as err:
+ self.module.modify_uplink(f_module, ome_connection_mock_for_smart_fabric_uplink, params.get("fabric_id", 0),
+ params.get("uplink_id", {}), params.get("uplinks", []))
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize(
+ "params", [{"inp": {"name": "uplink1", "fabric_name": "fabric1"},
+ "error_msg": "state is present but any of the following are missing: new_name, description,"
+ " uplink_type, ufd_enable, primary_switch_service_tag, primary_switch_ports, "
+ "secondary_switch_service_tag, secondary_switch_ports, tagged_networks, untagged_network"},
+ {"inp": {"name": "uplink1"},
+ "error_msg": "missing required arguments: fabric_name"},
+ {"inp": {"fabric_name": "fabric1"},
+ "error_msg": "missing required arguments: name"}, ])
+ def test_main_case_failures(self, mocker, params, ome_default_args, ome_connection_mock_for_smart_fabric_uplink,
+ ome_response_mock):
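+ """Missing or incomplete required arguments are reported through fail_json."""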
+ ome_default_args.update(params.get("inp"))
+ ome_response_mock.json_data = params.get("json_data")
+ mocker.patch(MODULE_PATH + "get_item_id", return_value=(params.get("get_item_id")))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == params.get("error_msg")
+
+ @pytest.mark.parametrize(
+ "params", [{"inp": {"state": "absent", "name": "uplink1", "fabric_name": "fabric1", "ufd_enable": "Enabled"},
+ "get_item_id": (0, []), "error_msg": "Fabric with name fabric1 does not exist."},
+ {"inp": {"state": "absent", "name": "uplink1", "fabric_name": "fabric1", "ufd_enable": "Enabled"},
+ "get_item_id": (1, []),
+ "get_item_and_list": ({'Id': 1}, []), "error_msg": "Successfully deleted the uplink."},
+ {"inp": {"state": "absent", "name": "uplink1", "fabric_name": "fabric1", "ufd_enable": "Enabled"},
+ "get_item_id": (1, []),
+ "get_item_and_list": ({'Id': 1}, []), "check_mode": True,
+ "error_msg": "Changes found to be applied."}, ])
+ def _test_main_case_failures2(self, mocker, params, ome_default_args, ome_connection_mock_for_smart_fabric_uplink,
+ ome_response_mock):
+ ome_default_args.update(params.get("inp"))
+ ome_response_mock.json_data = params.get("json_data")
+ mocker.patch(MODULE_PATH + "get_item_id", return_value=(params.get("get_item_id", (0, []))))
+ mocker.patch(MODULE_PATH + "get_item_and_list", return_value=(params.get("get_item_and_list")))
+ result = self.execute_module(ome_default_args, check_mode=params.get("check_mode", False))
+ assert result['msg'] == params.get("error_msg")
+
+ @pytest.mark.parametrize("params", [
+ {"fail_json": True, "json_data": {"JobId": 1234},
+ "get_item_id": (0, []),
+ "mparams": {"state": "absent", "name": "uplink1", "fabric_name": "fabric1", "ufd_enable": "Enabled"},
+ 'message': "Fabric with name fabric1 does not exist.", "success": True
+ },
+ {"fail_json": False, "json_data": {"JobId": 1234},
+ "get_item_id": (1, []), "get_item_and_list": ({}, []), "check_mode": True,
+ "mparams": {"state": "absent", "name": "uplink1", "fabric_name": "fabric1", "ufd_enable": "Enabled"},
+ 'message': "No changes found to be applied to the uplink configuration.", "success": True
+ },
+ {"fail_json": False, "json_data": {"JobId": 1234},
+ "get_item_id": (1, []), "get_item_and_list": ({}, []), "check_mode": False,
+ "mparams": {"state": "absent", "name": "uplink1", "fabric_name": "fabric1", "ufd_enable": "Enabled"},
+ 'message': "Uplink uplink1 does not exist.", "success": True
+ },
+ {"fail_json": False, "json_data": {"JobId": 1234},
+ "get_item_id": (1, []), "get_item_and_list": ({"Name": 'u1', 'Id': 12}, []), "check_mode": True,
+ "mparams": {"state": "absent", "name": "uplink1", "fabric_name": "fabric1", "ufd_enable": "Enabled"},
+ 'message': "Changes found to be applied.", "success": True
+ },
+ {"fail_json": True, "json_data": {"JobId": 1234},
+ "get_item_id": (1, []), "get_item_and_list":
+ ({"Id": "12", "Name": "u1", "Description": "Ethernet_Uplink", "NativeVLAN": 1, "UfdEnable": "NA",
+ "Ports": [{"Id": "2HB7NX2:ethernet1/1/13", "Name": ""},
+ {"Id": "2HB7NX2:ethernet1/1/12", "Name": ""}],
+ "Networks": [{"Id": 31554, "Name": "VLAN2", }]},
+ [{"Id": "12", "Name": "u1", "Description": "Ethernet_Uplink", "NativeVLAN": 1, "UfdEnable": "NA",
+ "Ports": [{"Id": "2HB7NX2:ethernet1/1/13", "Name": "", },
+ {"Id": "2HB7NX2:ethernet1/1/12", "Name": "", }],
+ "Networks": [{"Id": 31554, "Name": "VLAN2", }]},
+ {"Name": 'u2', 'Id': 13}]),
+ "mparams": {"state": "present", "name": "u1", "fabric_name": "fabric1",
+ "primary_switch_service_tag": "SVTAG1", "primary_switch_ports": [1, 2],
+ "secondary_switch_service_tag": 'SVTAG1', "secondary_switch_ports": [1, 2]},
+ 'message': "Primary and Secondary service tags must not be the same.", "success": True
+ },
+ {"fail_json": False, "json_data": {"JobId": 1234},
+ "get_item_id": (1, []), "get_item_and_list":
+ ({}, [{"Id": "12", "Name": "u1", "Description": "Ethernet_Uplink", "NativeVLAN": 1,
+ "UfdEnable": "NA", "Ports": [{"Id": "2HB7NX2:ethernet1/1/13", "Name": "", },
+ {"Id": "2HB7NX2:ethernet1/1/12", "Name": "", }],
+ "Networks": [{"Id": 31554, "Name": "VLAN2", }]}, {"Name": 'u2', 'Id': 13}]),
+ "validate_networks": ['a', 'b'], "validate_ioms": ['a', 'b'],
+ "mparams": {"state": "present", "name": "u1", "fabric_name": "fabric1", "uplink_type": 'Ethernet',
+ "tagged_networks": ['a', 'b'],
+ "primary_switch_service_tag": "SVTAG1", "primary_switch_ports": [1, 2],
+ "secondary_switch_service_tag": 'SVTAG2', "secondary_switch_ports": [1, 2]},
+ 'message': "Successfully created the uplink.", "success": True
+ },
+ {"fail_json": False, "json_data": {"JobId": 1234},
+ "get_item_id": (1, []), "get_item_and_list":
+ ({"Id": "12", "Name": "u1", "Description": "Ethernet_Uplink", "NativeVLAN": 1, "UfdEnable": "NA",
+ "Ports": [{"Id": "2HB7NX2:ethernet1/1/13", "Name": "", },
+ {"Id": "2HB7NX2:ethernet1/1/12", "Name": "", }],
+ "Networks": [{"Id": 31554, "Name": "VLAN2", }]},
+ [{"Id": "12", "Name": "u1", "Description": "Ethernet_Uplink", "NativeVLAN": 1,
+ "UfdEnable": "NA", "Ports": [{"Id": "2HB7NX2:ethernet1/1/13", "Name": "", },
+ {"Id": "2HB7NX2:ethernet1/1/12", "Name": "", }],
+ "Networks": [{"Id": 31554, "Name": "VLAN2", }]}, {"Name": 'u2', 'Id': 13}]),
+ "validate_networks": ['a', 'b'], "validate_ioms": ['a', 'b'],
+ "mparams": {"state": "present", "name": "u1", "fabric_name": "fabric1",
+ "tagged_networks": ['a', 'b'],
+ "primary_switch_service_tag": "SVTAG1", "primary_switch_ports": [1, 2],
+ "secondary_switch_service_tag": 'SVTAG2', "secondary_switch_ports": [1, 2]},
+ 'message': "Successfully modified the uplink.", "success": True
+ },
+ ])
+ def test_main(self, params, ome_connection_mock_for_smart_fabric_uplink, ome_default_args, ome_response_mock,
+ mocker):
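+ """End-to-end main() flows: fail_json cases run through _run_module_with_fail_json, the rest through _run_module, and the returned msg is compared."""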
+ mocker.patch(MODULE_PATH + 'get_item_id', return_value=params.get("get_item_id"))
+ mocker.patch(MODULE_PATH + 'get_item_and_list', return_value=params.get("get_item_and_list"))
+ mocker.patch(MODULE_PATH + 'validate_networks', return_value=params.get("validate_networks"))
+ mocker.patch(MODULE_PATH + 'validate_ioms', return_value=params.get("validate_ioms"))
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params.get("json_data")
+ ome_default_args.update(params.get('mparams'))
+ if params.get("fail_json", False):
+ result = self._run_module_with_fail_json(ome_default_args)
+ else:
+ result = self._run_module(ome_default_args, check_mode=params.get("check_mode", False))
+ assert result["msg"] == params['message']
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_smart_fabric_uplink_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_smart_fabric_uplink,
+ ome_response_mock):
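+ """URLError is reported as unreachable, while HTTPError and the remaining exception types fail the module."""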
+ ome_default_args.update({"name": "uplink1", "state": "present", "fabric_name": "f1", "new_name": "uplink2"})
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'get_item_id', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'get_item_id', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'get_item_id',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'uplink_id' not in result
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template.py
new file mode 100644
index 00000000..27c84ffa
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template.py
@@ -0,0 +1,602 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.2.0
+# Copyright (C) 2019-2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+from ssl import SSLError
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_template
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_template.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_template(mocker, ome_response_mock):
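+ """Patch RestOME inside ome_template and return the mocked connection; report details default to an empty report_list."""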
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ ome_connection_mock_obj.get_all_report_details.return_value = {"report_list": []}
+ return ome_connection_mock_obj
+
+
+TEMPLATE_RESOURCE = {"TEMPLATE_RESOURCE": "TemplateService/Templates"}
+
+
+class TestOmeTemplate(FakeAnsibleModule):
+ module = ome_template
+
+ @pytest.fixture
+ def get_template_resource_mock(self, mocker):
+ response_class_mock = mocker.patch(
+ MODULE_PATH + '_get_resource_parameters')
+ return response_class_mock
+
+ def test_get_service_tags_success_case(self, ome_connection_mock_for_template, ome_response_mock):
+ ome_connection_mock_for_template.get_all_report_details.return_value = {
+ "report_list": [{"Id": Constants.device_id1,
+ "DeviceServiceTag": Constants.service_tag1}]}
+ f_module = self.get_module_mock({'device_id': [], 'device_service_tag': [Constants.service_tag1]})
+ data = self.module.get_device_ids(f_module, ome_connection_mock_for_template)
+ assert data == [Constants.device_id1]
+
+ def test_get_device_ids_failure_case01(self, ome_connection_mock_for_template, ome_response_mock, ome_default_args):
+ ome_response_mock.json_data = {'value': []}
+ ome_response_mock.success = False
+ f_module = self.get_module_mock(params={'device_id': ["#@!1"]})
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_ids(f_module, ome_connection_mock_for_template)
+ assert exc.value.args[0] == "Unable to complete the operation because the entered target device id(s) " \
+ "'{0}' are invalid.".format("#@!1")
+
+ @pytest.mark.parametrize("params",
+ [{"mparams": {
+ "attributes": {
+ "Attributes": [
+ {
+ "Id": 93812,
+ "IsIgnored": False,
+ "Value": "Aisle Five"
+ },
+ {
+ "DisplayName": 'System, Server Topology, ServerTopology 1 Aisle Name',
+ "IsIgnored": False,
+ "Value": "Aisle 5"
+ }
+ ]
+ }}, "success": True,
+ "json_data": {
+ "Id": 11,
+ "Name": "ProfileViewEditAttributes",
+ "AttributeGroupNames": [],
+ "AttributeGroups": [
+ {
+ "GroupNameId": 5,
+ "DisplayName": "System",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 33016,
+ "DisplayName": "Server Operating System",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 93820,
+ "DisplayName": "ServerOS 1 Server Host Name",
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ }
+ ]
+ },
+ {
+ "GroupNameId": 33019,
+ "DisplayName": "Server Topology",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 93812,
+ "DisplayName": "ServerTopology 1 Aisle Name",
+ "Value": "Aisle 5",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ },
+ {
+ "AttributeId": 93811,
+ "DisplayName": "ServerTopology 1 Data Center Name",
+ "Description": None,
+ "Value": "BLG 2nd Floor DS 1",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ },
+ {
+ "AttributeId": 93813,
+ "DisplayName": "ServerTopology 1 Rack Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ },
+ {
+ "AttributeId": 93814,
+ "DisplayName": "ServerTopology 1 Rack Slot",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ }
+ ]
+ }
+ ],
+ "Attributes": []
+ },
+ {
+ "GroupNameId": 9,
+ "DisplayName": "iDRAC",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 32688,
+ "DisplayName": "Active Directory",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 93523,
+ "DisplayName": "ActiveDirectory 1 Active Directory RAC Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32930,
+ "DisplayName": "NIC Information",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 93035,
+ "DisplayName": "NIC 1 DNS RAC Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ },
+ {
+ "AttributeId": 92510,
+ "DisplayName": "NIC 1 Enable VLAN",
+ "Description": None,
+ "Value": "Disabled",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ }
+ ]
+ }
+ ],
+ "Attributes": []}]},
+ "diff": 2}])
+ def test_attributes_check(self, params, ome_connection_mock_for_template, ome_response_mock):
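+ """The parametrized case expects attributes_check to report two attribute differences against the template attribute tree."""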
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params["mparams"])
+ result = self.module.attributes_check(f_module, ome_connection_mock_for_template,
+ params['mparams']['attributes'], 123)
+ assert result == params["diff"]
+
+ def test_get_device_ids_failure_case_02(self, ome_connection_mock_for_template, ome_response_mock,
+ ome_default_args):
+ ome_connection_mock_for_template.get_all_report_details.return_value = {
+ "report_list": [{"Id": Constants.device_id1,
+ "DeviceServiceTag": Constants.service_tag1},
+ {"Id": Constants.device_id2,
+ "DeviceServiceTag": "tag2"}
+ ]}
+ f_module = self.get_module_mock(params={'device_id': [Constants.device_id2], 'device_service_tag': ["abcd"]})
+ with pytest.raises(Exception) as exc:
+ self.module.get_device_ids(f_module, ome_connection_mock_for_template)
+ assert exc.value.args[0] == "Unable to complete the operation because the entered target service tag(s) " \
+ "'{0}' are invalid.".format('abcd')
+
+ def test_get_device_ids_for_no_device_failure_case_03(self, ome_connection_mock_for_template, ome_response_mock,
+ ome_default_args):
+ ome_connection_mock_for_template.get_all_report_details.return_value = {
+ "report_list": [{"Id": Constants.device_id1,
+ "DeviceServiceTag": Constants.service_tag1}
+ ], "resp_obj": ome_response_mock}
+ f_module = self.get_module_mock(params={'device_service_tag': [Constants.service_tag1], 'device_id': []})
+ with pytest.raises(Exception) as exc:
+ device_ids = self.module.get_device_ids(f_module, ome_connection_mock_for_template)
+ assert exc.value.args[0] == "Failed to fetch the device ids."
+
+ def test_get_view_id_success_case(self, ome_connection_mock_for_template, ome_response_mock):
+ ome_response_mock.json_data = {'value': [{"Description": "", 'Id': 2}]}
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ result = self.module.get_view_id(ome_response_mock, "Deployment")
+ assert result == 2
+
+ create_payload = {"Fqdds": "All", # Mandatory for create
+ "ViewTypeId": 4, "attributes": {"Name": "create template name"}, "SourceDeviceId": 2224}
+
+ @pytest.mark.parametrize("param", [{"Fqdds": "All", # Mandatory for create
+ "ViewTypeId": 4, "attributes": {"Name": "create template name"},
+ "SourceDeviceId": 2224}])
+ def test_get_create_payload(self, param, ome_response_mock, ome_connection_mock_for_template):
+ f_module = self.get_module_mock(params=param)
+ data = self.module.get_create_payload(f_module, ome_connection_mock_for_template, 2224, 4)
+ assert data['Fqdds'] == "All"
+
+ def test_get_template_by_id_success_case(self, ome_response_mock):
+ ome_response_mock.json_data = {'value': []}
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ f_module = self.get_module_mock()
+ data = self.module.get_template_by_id(f_module, ome_response_mock, 17)
+ assert data
+
+ def test_get_template_by_name_success_case(self, ome_response_mock, ome_connection_mock_for_template):
+ ome_response_mock.json_data = {'value': [{"Name": "test Sample Template import1", "Id": 24}]}
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ f_module = self.get_module_mock()
+ data = self.module.get_template_by_name("test Sample Template import1", f_module,
+ ome_connection_mock_for_template)
+ assert data["Name"] == "test Sample Template import1"
+ assert data["Id"] == 24
+
+ def test_get_group_devices_all(self, ome_response_mock, ome_connection_mock_for_template):
+ ome_response_mock.json_data = {'value': [{"Name": "Device1", "Id": 24}]}
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ f_module = self.get_module_mock()
+ data = self.module.get_group_devices_all(ome_connection_mock_for_template, "uri")
+ assert data == [{"Name": "Device1", "Id": 24}]
+
+ def _test_get_template_by_name_fail_case(self, ome_response_mock):
+ ome_response_mock.json_data = {'value': [{"Name": "template by name for template name", "Id": 12}]}
+ ome_response_mock.status_code = 500
+ ome_response_mock.success = False
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module.get_template_by_name("template by name for template name", f_module, ome_response_mock)
+ assert exc.value.args[0] == "Unable to complete the operation because the" \
+ " requested template with name {0} is not present." \
+ .format("template by name for template name")
+
+ create_payload = {"command": "create", "device_id": [25007],
+ "ViewTypeId": 4, "attributes": {"Name": "texplate999", "Fqdds": "All"}, "template_view_type": 4}
+ inter_payload = {
+ "Name": "texplate999",
+ "SourceDeviceId": 25007,
+ "Fqdds": "All",
+ "TypeId": 2,
+ "ViewTypeId": 2
+ }
+ payload_out = ('TemplateService/Templates',
+ {
+ "Name": "texplate999",
+ "SourceDeviceId": 25007,
+ "Fqdds": "All",
+ "TypeId": 2,
+ "ViewTypeId": 2
+ }, "POST")
+
+ @pytest.mark.parametrize("params", [{"inp": create_payload, "mid": inter_payload, "out": payload_out}])
+ def test__get_resource_parameters_create_success_case(self, mocker, ome_response_mock,
+ ome_connection_mock_for_template, params):
+ f_module = self.get_module_mock(params=params["inp"])
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=[25007])
+ mocker.patch(MODULE_PATH + 'get_view_id',
+ return_value=["Deployment"])
+ mocker.patch(MODULE_PATH + 'get_create_payload',
+ return_value=params["mid"])
+ data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ assert data == params["out"]
+
+ modify_payload = {"command": "modify", "device_id": [25007], "template_id": 1234,
+ "ViewTypeId": 4, "attributes": {"Name": "texplate999", "Fqdds": "All"}, "template_view_type": 4}
+ inter_payload = {
+ "Name": "texplate999",
+ "SourceDeviceId": 25007,
+ "Fqdds": "All",
+ "TypeId": 2,
+ "ViewTypeId": 2
+ }
+ payload_out = ('TemplateService/Templates(1234)',
+ {
+ "Name": "texplate999",
+ "SourceDeviceId": 25007,
+ "Fqdds": "All",
+ "TypeId": 2,
+ "ViewTypeId": 2
+ }, "PUT")
+
+ @pytest.mark.parametrize("params", [{"inp": modify_payload, "mid": inter_payload, "out": payload_out}])
+ def test__get_resource_parameters_modify_success_case(self, mocker, ome_response_mock,
+ ome_connection_mock_for_template, params):
+ f_module = self.get_module_mock(params=params["inp"])
+ mocker.patch(MODULE_PATH + 'get_template_by_id',
+ return_value={})
+ mocker.patch(MODULE_PATH + 'get_modify_payload',
+ return_value={})
+ mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
+ data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ assert data == ('TemplateService/Templates(1234)', {}, 'PUT')
+
+ def test__get_resource_parameters_delete_success_case(self, mocker, ome_response_mock,
+ ome_connection_mock_for_template):
+ f_module = self.get_module_mock({"command": "delete", "template_id": 1234})
+ mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
+ data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ assert data == ('TemplateService/Templates(1234)', {}, 'DELETE')
+
+ def test__get_resource_parameters_export_success_case(self, mocker, ome_response_mock,
+ ome_connection_mock_for_template):
+ f_module = self.get_module_mock({"command": "export", "template_id": 1234})
+ mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
+ data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ assert data == ('TemplateService/Actions/TemplateService.Export', {'TemplateId': 1234}, 'POST')
+
+ def test__get_resource_parameters_deploy_success_case(self, mocker, ome_response_mock,
+ ome_connection_mock_for_template):
+ f_module = self.get_module_mock({"command": "deploy", "template_id": 1234})
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=[Constants.device_id1])
+ mocker.patch(MODULE_PATH + 'get_deploy_payload',
+ return_value={"deploy_payload": "value"})
+ mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
+ data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ assert data == ('TemplateService/Actions/TemplateService.Deploy', {"deploy_payload": "value"}, 'POST')
+
+ def test__get_resource_parameters_clone_success_case(self, mocker, ome_response_mock,
+ ome_connection_mock_for_template):
+ f_module = self.get_module_mock({"command": "clone", "template_id": 1234, "template_view_type": 2})
+ mocker.patch(MODULE_PATH + 'get_view_id',
+ return_value=2)
+ mocker.patch(MODULE_PATH + 'get_clone_payload',
+ return_value={"clone_payload": "value"})
+ mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
+ data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ assert data == ('TemplateService/Actions/TemplateService.Clone', {"clone_payload": "value"}, 'POST')
+
+ def test__get_resource_parameters_import_success_case(self, mocker, ome_response_mock,
+ ome_connection_mock_for_template):
+ f_module = self.get_module_mock({"command": "import", "template_id": 1234, "template_view_type": 2})
+ mocker.patch(MODULE_PATH + 'get_view_id',
+ return_value=2)
+ mocker.patch(MODULE_PATH + 'get_import_payload',
+ return_value={"import_payload": "value"})
+ data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ assert data == ('TemplateService/Actions/TemplateService.Import', {"import_payload": "value"}, 'POST')
+
+ @pytest.mark.parametrize("params", [{"inp": {"command": "modify"}, "mid": inter_payload, "out": payload_out}])
+ def test__get_resource_parameters_modify_template_none_failure_case(self, mocker, ome_response_mock,
+ ome_connection_mock_for_template, params):
+ f_module = self.get_module_mock(params=params["inp"])
+ with pytest.raises(Exception) as exc:
+ data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ assert exc.value.args[0] == "Enter a valid template_name or template_id"
+
+ @pytest.mark.parametrize("params",
+ [{"success": True, "json_data": {"value": [{"Name": "template_name", "Id": 123}]},
+ "id": 123, "gtype": True},
+ {"success": True, "json_data": {}, "id": 0, "gtype": False},
+ {"success": False, "json_data": {"value": [{"Name": "template_name", "Id": 123}]},
+ "id": 0, "gtype": False},
+ {"success": True, "json_data": {"value": [{"Name": "template_name1", "Id": 123}]},
+ "id": 12, "gtype": False}])
+ def test_get_type_id_valid(self, params, ome_connection_mock_for_template,
+ ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ is_valid = self.module.get_type_id_valid(ome_connection_mock_for_template, params["id"])
+ assert is_valid == params["gtype"]
+
+ @pytest.mark.parametrize("params",
+ [{"success": True, "json_data": {"value": [{"Description": "Deployment", "Id": 2}]},
+ "view": "Deployment", "gtype": 2},
+ {"success": True, "json_data": {}, "view": "Compliance", "gtype": 1},
+ {"success": False, "json_data": {"value": [{"Description": "template_name", "Id": 1}]},
+ "view": "Deployment", "gtype": 2},
+ {"success": True, "json_data": {"value": [{"Description": "template_name1", "Id": 2}]},
+ "view": "Deployment", "gtype": 2}])
+ def test_get_view_id(self, params, ome_connection_mock_for_template,
+ ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ view_id = self.module.get_view_id(ome_connection_mock_for_template, params["view"])
+ assert view_id == params["gtype"]
+
+ @pytest.mark.parametrize("param",
+ [{"pin": {"NetworkBootIsoModel": {"ShareDetail": {"Password": "share_password"}}}},
+ {"pin": {"NetworkBootIsoModel": {"ShareDetail": {"Password1": "share_password"}}}},
+ {"pin": {"NetworkBootIsoModel": {"ShareDetail": [{"Password1": "share_password"}]}}}])
+ def test_password_no_log(self, param):
+ attributes = param["pin"]
+ self.module.password_no_log(attributes)
+
+ def test__get_resource_parameters_create_failure_case_02(self, mocker, ome_response_mock,
+ ome_connection_mock_for_template):
+ f_module = self.get_module_mock({"command": "create", "template_name": "name"})
+ mocker.patch(MODULE_PATH + 'get_device_ids',
+ return_value=[Constants.device_id1, Constants.device_id2])
+ mocker.patch(MODULE_PATH + 'get_template_by_name',
+ return_value=("template", 1234))
+ with pytest.raises(Exception) as exc:
+ data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ assert exc.value.args[0] == "Create template requires only one reference device"
+
+ def test_main_template_success_case2(self, ome_default_args, mocker, module_mock, ome_connection_mock_for_template,
+ get_template_resource_mock, ome_response_mock):
+ ome_connection_mock_for_template.__enter__.return_value = ome_connection_mock_for_template
+ ome_connection_mock_for_template.invoke_request.return_value = ome_response_mock
+ ome_response_mock.json_data = {
+ "value": [{"device_id": "1111", "command": "create", "attributes": {"Name": "new 1template name"}}]}
+ ome_response_mock.status_code = 200
+ ome_default_args.update(
+ {"device_id": "1111", "command": "create", "attributes": {"Name": "new 1template name"}})
+ ome_response_mock.success = True
+ mocker.patch(MODULE_PATH + '_get_resource_parameters',
+ return_value=(TEMPLATE_RESOURCE, "template_payload", "POST"))
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is True
+ assert result['msg'] == "Successfully created a template with ID {0}".format(ome_response_mock.json_data)
+
+ def test_get_import_payload_success_case_01(self, ome_connection_mock_for_template):
+ f_module = self.get_module_mock(params={"attributes": {"Name": "template1", "Content": "Content"}})
+ self.module.get_import_payload(f_module, ome_connection_mock_for_template, 2)
+
+ def test_get_deploy_payload_success_case_01(self):
+ module_params = {"attributes": {"Name": "template1"}}
+ self.module.get_deploy_payload(module_params, [Constants.device_id1], 1234)
+
+ @pytest.mark.parametrize("param",
+ [{"mparams": {"attributes": {"Name": "template1"}}, "name": "template0",
+ "template_id": 123,
+ "clone_payload": {"SourceTemplateId": 123, "NewTemplateName": "template1",
+ "ViewTypeId": 2}}])
+ def test_get_clone_payload_success_case_01(self, param, ome_connection_mock_for_template):
+ f_module = self.get_module_mock(param["mparams"])
+ module_params = param["mparams"]
+ payload = self.module.get_clone_payload(f_module, ome_connection_mock_for_template, param['template_id'], 2)
+ assert payload == param['clone_payload']
+
+ @pytest.mark.parametrize("param",
+ [{"inp": {"command": "create", "template_name": "name", "device_id": [None],
+ "device_service_tag": [None]},
+ "msg": "Argument device_id or device_service_tag has null values"},
+ {"inp": {"command": "deploy", "template_name": "name", "device_id": [None],
+ "device_service_tag": [None]},
+ "msg": "Argument device_id or device_service_tag has null values"},
+ {"inp": {"command": "import", "template_name": "name", "device_id": [],
+ "device_service_tag": []},
+ "msg": "Argument 'Name' required in attributes for import operation"},
+ {"inp": {"command": "import", "attributes": {"Name": "name"}, "device_id": [],
+ "device_service_tag": []},
+ "msg": "Argument 'Content' required in attributes for import operation"},
+ {"inp": {"command": "clone", "template_name": "name", "device_id": [],
+ "device_service_tag": []},
+ "msg": "Argument 'Name' required in attributes for clone operation"}
+ ])
+ def test_validate_inputs(self, param, mocker):
+ f_module = self.get_module_mock(param["inp"])
+ mocker.patch(MODULE_PATH + 'password_no_log')
+ with pytest.raises(Exception) as exc:
+ self.module._validate_inputs(f_module)
+ assert exc.value.args[0] == param["msg"]
+
+ @pytest.mark.parametrize("param", [
+ {"inp": {"command": "deploy", "template_name": "name",
+ "device_group_names": ["mygroup"]},
+ "group": {'Id': 23, "Name": "mygroup"},
+ "dev_list": [1, 2, 3]}])
+ def test_get_group_details(self, param, ome_connection_mock_for_template, mocker,
+ ome_response_mock):
+ f_module = self.get_module_mock(param["inp"])
+ ome_response_mock.json_data = {
+ "value": [{'Id': 1, "Name": "mygroup3"}, {'Id': 2, "Name": "mygroup2"}, {'Id': 3, "Name": "mygroup"}]}
+ ome_response_mock.status_code = 200
+ mocker.patch(MODULE_PATH + 'get_group_devices_all', return_value=[{'Id': 1}, {'Id': 2}, {'Id': 3}])
+ dev_list = self.module.get_group_details(ome_connection_mock_for_template, f_module)
+ assert dev_list == param["dev_list"]
+
+ @pytest.mark.parametrize("param", [
+ {"inp": {"command": "deploy", "template_name": "name",
+ "device_group_names": ["mygroup"]},
+ "group": {'Id': 23, "Name": "mygroup"},
+ "dev_list": [1, 2, 3]}])
+ def test_get_group_details_case2(self, param, ome_connection_mock_for_template, mocker,
+ ome_response_mock):
+ f_module = self.get_module_mock(param["inp"])
+ ome_response_mock.json_data = {
+ "value": [{'Id': 1, "Name": "mygroup3"}, {'Id': 2, "Name": "mygroup2"}, {'Id': 3, "Name": "mygroup"}]}
+ ome_response_mock.status_code = 200
+ mocker.patch(MODULE_PATH + 'get_group_devices_all', return_value=[{'Id': 1}, {'Id': 2}, {'Id': 3}])
+ dev_list = self.module.get_group_details(ome_connection_mock_for_template, f_module)
+ assert dev_list == param["dev_list"]
+
+ @pytest.mark.parametrize("params", [
+ {"mparams": {"command": "modify", "name": "profile", "attributes": {
+ "Attributes": [
+ {
+ "Id": 93812,
+ "IsIgnored": False,
+ "Value": "Aisle Five"
+ },
+ {
+ "DisplayName": 'System, Server Topology, ServerTopology 1 Aisle Name',
+ "IsIgnored": False,
+ "Value": "Aisle 5"
+ }]}},
+ "success": True, "template": {"Name": "template_name", "Id": 123, "Description": "temp described"},
+ "json_data": 0, "get_template_by_name": {"Name": "template1", "Id": 122, "Description": "temp described"},
+ "res": "No changes found to be applied."},
+ {"mparams": {"command": "modify", "name": "profile", "attributes": {
+ "Name": "new_name",
+ "Attributes": [
+ {
+ "Id": 93812,
+ "IsIgnored": False,
+ "Value": "Aisle Five"
+ },
+ {
+ "DisplayName": 'System, Server Topology, ServerTopology 1 Aisle Name',
+ "IsIgnored": False,
+ "Value": "Aisle 5"
+ }]}}, "success": True,
+ "template": {"Name": "template_name", "Id": 123, "Description": "temp described"}, "json_data": 0,
+ "get_template_by_name": {"Name": "template1", "Id": 122, "Description": "temp described"},
+ "res": "Template with name 'new_name' already exists."}
+ ])
+ def test_modify_payload(self, params, ome_connection_mock_for_template, mocker,
+ ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ mocker.patch(MODULE_PATH + 'get_template_by_name', return_value=params.get('get_template_by_name'))
+ mocker.patch(MODULE_PATH + 'attributes_check', return_value=params.get('attributes_check', 0))
+ f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False))
+ error_message = params["res"]
+ with pytest.raises(Exception) as err:
+ self.module.get_modify_payload(f_module, ome_connection_mock_for_template, params.get('template'))
+ assert err.value.args[0] == error_message
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, TypeError, ConnectionError,
+ HTTPError, URLError, SSLError])
+ def test_main_template_exception_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_template, ome_response_mock):
+ ome_default_args.update({"command": "export", "template_name": "t1", 'attributes': {'Attributes': "myattr1"}})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'password_no_log')
+ mocker.patch(MODULE_PATH + '_get_resource_parameters', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + '_get_resource_parameters', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + '_get_resource_parameters',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_identity_pool.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_identity_pool.py
new file mode 100644
index 00000000..0e6cbca4
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_identity_pool.py
@@ -0,0 +1,160 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.1.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_template_identity_pool
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ssl import SSLError
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_template_identity_pool.'
+template1 = \
+ {
+ "@odata.context": "/api/$metadata#TemplateService.Template",
+ "@odata.type": "#TemplateService.Template",
+ "@odata.id": "/api/TemplateService/Templates(9)",
+ "Id": 9,
+ "Name": "template",
+ "Description": None,
+ "Content": None,
+ "SourceDeviceId": 10116,
+ "TypeId": 2,
+ "ViewTypeId": 2,
+ "TaskId": 10125,
+ "HasIdentityAttributes": True,
+ "Status": 2060,
+ "IdentityPoolId": 1,
+ "IsPersistencePolicyValid": True,
+ "IsStatelessAvailable": True,
+ "IsBuiltIn": False,
+ "CreatedBy": "admin",
+ "CreationTime": "2022-02-02 09:33:25.887057",
+ "LastUpdatedBy": "admin",
+ "LastUpdatedTime": "2022-02-02 13:53:37.443315",
+ "Views@odata.navigationLink": "/api/TemplateService/Templates(9)/Views",
+ "AttributeDetails": {
+ "@odata.id": "/api/TemplateService/Templates(9)/AttributeDetails"
+ }
+ }
+
+
+@pytest.fixture
+def ome_connection_mock_template_identity_pool(mocker, ome_response_mock):
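+ """Patch RestOME inside ome_template_identity_pool and return the mocked connection object."""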
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOMETemplateIdentityPool(FakeAnsibleModule):
+ module = ome_template_identity_pool
+
+ @pytest.mark.parametrize("exc_type", [HTTPError, URLError, ValueError, TypeError, ConnectionError, SSLError])
+ def test_main_template_identity_failure(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_template_identity_pool):
+ ome_default_args.update({"template_name": "template"})
+ ome_connection_mock_template_identity_pool.json_data = {"template_name": "ansible_template"}
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type == URLError:
+ mocker.patch(
+ MODULE_PATH + 'get_template_id',
+ side_effect=exc_type('url error'))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(
+ MODULE_PATH + 'get_template_id',
+ side_effect=exc_type('error'))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(
+ MODULE_PATH + 'get_identity_id',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str))
+ )
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
+
+ def test_main_success(self, mocker, ome_default_args, ome_connection_mock_template_identity_pool,
+ ome_response_mock):
+ mocker.patch(MODULE_PATH + "get_template_id", return_value=template1)
+ mocker.patch(MODULE_PATH + "get_identity_id", return_value=10)
+ ome_default_args.update({"template_name": "template", "identity_pool_name": "pool_name"})
+ ome_response_mock.json_data = {"msg": "Successfully assigned identity pool to template.", "changed": True}
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 200
+ result = self.execute_module(ome_default_args)
+ assert "msg" in result
+ assert result["msg"] == "Successfully attached identity pool to " \
+ "template."
+
+ def test_get_template_vlan_info(self, ome_connection_mock_template_identity_pool, ome_response_mock):
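+ """The Nic Bonding Technology value is read from the template attribute groups; this payload yields 'LACP'."""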
+ f_module = self.get_module_mock(params={"nic_identifier": "NIC Slot 4"})
+ temp_net_details = {
+ "AttributeGroups": [
+ {
+ "GroupNameId": 1001,
+ "DisplayName": "NICModel",
+ "SubAttributeGroups": [{
+ "GroupNameId": 1,
+ "DisplayName": "NIC Slot 4",
+ "SubAttributeGroups": [],
+ "Attributes": []
+ }],
+ "Attributes": []
+ },
+ {
+ "GroupNameId": 1005,
+ "DisplayName": "NicBondingTechnology",
+ "SubAttributeGroups": [],
+ "Attributes": [{"AttributeId": 0,
+ "DisplayName": "Nic Bonding Technology",
+ "Description": None, "Value": "LACP",
+ "IsIgnored": False}]
+ }
+ ]
+ }
+ ome_response_mock.success = True
+ ome_response_mock.json_data = temp_net_details
+ nic_bonding_tech = self.module.get_template_vlan_info(ome_connection_mock_template_identity_pool, 12)
+ assert nic_bonding_tech == "LACP"
+
+ def test_get_template_id(self, ome_connection_mock_template_identity_pool, ome_response_mock):
+ ome_response_mock.json_data = {"value": [{"Name": "template", "Id": 9, "IdentityPoolId": 1}]}
+ ome_response_mock.success = True
+ f_module = self.get_module_mock(params={"template_name": "template"})
+ res_temp = self.module.get_template_id(ome_connection_mock_template_identity_pool, f_module)
+ assert res_temp == {"Name": "template", "Id": 9, "IdentityPoolId": 1}
+
+ def test_get_identity_id(self, ome_connection_mock_template_identity_pool):
+ data = {"report_list": [{"Name": "pool_name", "Id": 10}]}
+ ome_connection_mock_template_identity_pool.get_all_report_details.return_value = data
+ f_module = self.get_module_mock(params={"identity_pool_name": "pool_name"})
+ result = self.module.get_identity_id(ome_connection_mock_template_identity_pool, f_module)
+ assert result == 10
+
+ def test_get_identity_id_fail(self, ome_connection_mock_template_identity_pool, ome_response_mock):
+ data = {"report_list": [{"Name": "pool_name", "Id": 10}]}
+ ome_connection_mock_template_identity_pool.get_all_report_details.return_value = data
+ f_module = self.get_module_mock(params={"identity_pool_name": "invalid_pool_name"})
+ with pytest.raises(Exception) as exc:
+ self.module.get_identity_id(ome_connection_mock_template_identity_pool, f_module)
+ assert exc.value.args[0] == "Unable to complete the operation because the requested identity pool with " \
+ "name 'invalid_pool_name' is not present."
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_info.py
new file mode 100644
index 00000000..8f8bb328
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_info.py
@@ -0,0 +1,98 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 2.1.3
+# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_template_info
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def ome_connection_template_info_mock(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_template_info.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeTemplateInfo(FakeAnsibleModule):
+ module = ome_template_info
+
+ @pytest.mark.parametrize("module_params,data", [({"system_query_options": {"filter": "abc"}}, "$filter")])
+ def test_get_query_parameters(self, module_params, data):
+ res = self.module._get_query_parameters(module_params)
+ if data is not None:
+ assert data in res
+ else:
+ assert res is None
+
+ def test_get_template_info_success_case01(self, ome_default_args, ome_connection_template_info_mock,
+ ome_response_mock):
+ ome_response_mock.json_data = {"value": [""]}
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert 'template_info' in result
+
+ def test_get_template_info_success_case02(self, mocker, ome_default_args, ome_connection_template_info_mock,
+ ome_response_mock):
+ ome_default_args.update({"template_id": "24"})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [{"template_id": "24"}]}
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'template_info' in result
+
+ def test_get_template_info_success_case03(self, mocker, ome_default_args, ome_connection_template_info_mock,
+ ome_response_mock):
+ mocker.patch(MODULE_PATH + 'ome_template_info._get_query_parameters',
+ return_value={"filter": "abc"})
+ ome_default_args.update({"system_query_options": {"filter": "abc"}})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [{"filter": "abc"}]}
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'template_info' in result
+
+ def test_get_template_info_failure_case(self, ome_default_args, ome_connection_template_info_mock,
+ ome_response_mock):
+ ome_response_mock.status_code = 500
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == 'Failed to fetch the template facts'
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_ome_template_info_main_exception_handling_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_template_info_mock, ome_response_mock):
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ ome_connection_template_info_mock.invoke_request.side_effect = exc_type('test')
+ else:
+ ome_connection_template_info_mock.invoke_request.side_effect = exc_type('http://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert 'template_info' not in result
+ assert 'msg' in result
+ assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_network_vlan.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_network_vlan.py
new file mode 100644
index 00000000..c182b2b9
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_network_vlan.py
@@ -0,0 +1,349 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.3.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+from ssl import SSLError
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_template_network_vlan
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
+NO_CHANGES_MSG = "No changes found to be applied."
+CHANGES_FOUND = "Changes found to be applied."
+SUCCESS_MSG = "Successfully applied the network settings to the template."
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_template_network_vlan.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_template_network_vlan(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeTemplateNetworkVlan(FakeAnsibleModule):
+ module = ome_template_network_vlan
+
+ @pytest.mark.parametrize("params", [{"mparams": {"template_id": 123}, "success": True, "json_data": {
+ "value": [{"Name": "vlan_name", "Id": 123, "IdentityPoolId": 23}]},
+ "res": {"Name": "vlan_name", "Id": 123, "IdentityPoolId": 23}},
+ {"mparams": {"template_name": "vlan_name"}, "success": True, "json_data": {
+ "value": [{"Name": "vlan_name", "Id": 123, "IdentityPoolId": 23}]},
+ "res": {"Name": "vlan_name", "Id": 123, "IdentityPoolId": 23}}])
+ def test_get_template_details(
+ self, params, ome_connection_mock_for_template_network_vlan, ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params["mparams"])
+ result = self.module.get_template_details(
+ f_module, ome_connection_mock_for_template_network_vlan)
+ assert result == params["res"]
+
+ @pytest.mark.parametrize("kv", [{"key": "1", "dct": {"one": "1", "two": "2"}, "res": "one"},
+ {"key": "3", "dct": {"one": "1", "two": "2"}, "res": None}])
+ def test_get_key(self, kv):
+ val = kv["key"]
+ d = kv["dct"]
+ k = self.module.get_key(val, d)
+ assert k == kv["res"]
+
+ def test_get_vlan_name_id_map(
+ self, ome_connection_mock_for_template_network_vlan, ome_response_mock):
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {
+ "value": [{"Name": "vlan1", "Id": 1}, {"Name": "vlan2", "Id": 2}]}
+ d = self.module.get_vlan_name_id_map(
+ ome_connection_mock_for_template_network_vlan)
+ assert d == {"vlan1": 1, "vlan2": 2}
+
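+ # get_template_vlan_info is expected to flatten the nested AttributeGroups payload into per-port
+ # maps (component id, untagged VLAN, tagged VLANs, NIC bonding) plus the NIC bonding technology.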
+ def test_get_template_vlan_info(
+ self, ome_connection_mock_for_template_network_vlan, ome_response_mock):
+ f_module = self.get_module_mock(
+ params={"nic_identifier": "NIC Slot 4"})
+ temp_net_details = {"AttributeGroups": [{"GroupNameId": 1001, "DisplayName": "NICModel", "SubAttributeGroups": [
+ {"GroupNameId": 1, "DisplayName": "NIC Slot 4", "SubAttributeGroups": [{"GroupNameId": 1,
+ "SubAttributeGroups": [
+ {"GroupNameId": 1,
+ "DisplayName": "Partition",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {"CustomId": 2302,
+ "DisplayName": "Vlan Tagged",
+ "Value": "12765, 12767, 12768"},
+ {"CustomId": 2302,
+ "DisplayName": "Vlan UnTagged",
+ "Value": "12766"}]}],
+ "Attributes": []},
+ {"GroupNameId": 2,
+ "DisplayName": "Port ",
+ "SubAttributeGroups": [
+ {"GroupNameId": 1,
+ "DisplayName": "Partition ",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {"CustomId": 2301,
+ "DisplayName": "Vlan Tagged",
+ "Value": "12766"},
+ {"CustomId": 2301,
+ "DisplayName": "Vlan UnTagged",
+ "Value": "12767"}]}],
+ "Attributes": []}],
+ "Attributes": []}], "Attributes": []}, {"GroupNameId": 1005, "DisplayName": "NicBondingTechnology",
+ "SubAttributeGroups": [], "Attributes": [
+ {"AttributeId": 0, "CustomId": 0, "AttributeEditInfoId": 0, "DisplayName": "Nic Bonding Technology",
+ "Description": None, "Value": "NIC bonding enabled", "IsReadOnly": False, "IsIgnored": False,
+ "IsSecure": False, "IsLinkedToSecure": False, "TargetSpecificTypeId": 0}]}]}
+ ome_response_mock.success = True
+ ome_response_mock.json_data = temp_net_details
+ port_id_map, port_untagged_map, port_tagged_map, port_nic_bond_map, nic_bonding_tech = self.module.get_template_vlan_info(
+ f_module, ome_connection_mock_for_template_network_vlan, 12)
+ assert port_id_map == {1: 2302, 2: 2301}
+ assert port_untagged_map == {1: 12766, 2: 12767}
+ assert port_tagged_map == {1: [12765, 12767, 12768], 2: [12766]}
+
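+ # get_vlan_payload should merge the requested untagged/tagged assignments with the template's
+ # current VLAN info (mocked here) into the TemplateId/VlanAttributes payload asserted below.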
+ def test_get_vlan_payload(
+ self, mocker, ome_connection_mock_for_template_network_vlan):
+ f_module = self.get_module_mock(params={"template_id": 12})
+ untag_dict = {1: 12766}
+ tagged_dict = {2: [12765, 12766]}
+ port_id_map = {1: 2302, 2: 2301}
+ port_untagged_map = {1: 12766, 2: 12767}
+ port_tagged_map = {1: [12765, 12767, 12768], 2: [12766]}
+ port_nic_bond_map = {1: True, 2: False}
+ nic_bonding_tech = "LACP"
+ mocker.patch(MODULE_PATH + 'get_template_details',
+ return_value={"Name": "vlan_name", "Id": 12, "IdentityPoolId": 23})
+ mocker.patch(MODULE_PATH + 'get_template_vlan_info', return_value=(
+ port_id_map, port_untagged_map, port_tagged_map, port_nic_bond_map, nic_bonding_tech))
+ payload = self.module.get_vlan_payload(f_module, ome_connection_mock_for_template_network_vlan, untag_dict,
+ tagged_dict)
+ assert payload["TemplateId"] == 12
+ assert payload["VlanAttributes"] == [
+ {"ComponentId": 2302, "Tagged": [
+ 12765, 12767, 12768], "Untagged": 12766, 'IsNicBonded': True},
+ {"ComponentId": 2301, "Tagged": [12765, 12766], "Untagged": 12767, 'IsNicBonded': False}]
+
+ @pytest.mark.parametrize("params", [
+ {"untag_dict": {1: 12766}, "tagged_dict": {2: [12765, 12766]},
+ "port_id_map": {1: 2302, 2: 2301}, "port_untagged_map": {1: 12766}, "port_tagged_map": {2: [12765, 12766]},
+ "mparams": {"template_id": 12}, "port_nic_bond_map": {1: True, 2: False}, 'nic_bonding_tech': "LACP",
+ 'message': "No changes found to be applied."},
+ {"untag_dict": {3: 12766}, "tagged_dict": {2: [12765, 12766]},
+ "port_id_map": {1: 2302, 2: 2301}, "port_untagged_map": {1: 12766}, "port_tagged_map": {2: [12765, 12766]},
+ "mparams": {"template_id": 12}, "port_nic_bond_map": {1: True, 2: False}, 'nic_bonding_tech': "LACP",
+ 'message': "Invalid port(s) dict_keys([3]) found for untagged VLAN"},
+ {"untag_dict": {1: 12766}, "tagged_dict": {3: [12765, 12766]},
+ "port_id_map": {1: 2302, 2: 2301}, "port_untagged_map": {1: 12766}, "port_tagged_map": {2: [12765, 12766]},
+ "mparams": {"template_id": 12}, "port_nic_bond_map": {1: True, 2: False}, 'nic_bonding_tech': "LACP",
+ 'message': "Invalid port(s) dict_keys([3]) found for tagged VLAN"},
+ ])
+ def test_get_vlan_payload_msg(
+ self, params, ome_connection_mock_for_template_network_vlan, ome_default_args, ome_response_mock, mocker):
+ f_module = self.get_module_mock(params=params['mparams'])
+ mocker.patch(MODULE_PATH + 'get_template_details',
+ return_value={"Name": "vlan_name", "Id": 12, "IdentityPoolId": 23})
+ mocker.patch(MODULE_PATH + 'get_template_vlan_info', return_value=(
+ params['port_id_map'], params['port_untagged_map'], params['port_tagged_map'],
+ params['port_nic_bond_map'], params['nic_bonding_tech']))
+ with pytest.raises(Exception) as exc:
+ self.module.get_vlan_payload(f_module, ome_connection_mock_for_template_network_vlan, params['untag_dict'],
+ params['tagged_dict'])
+ assert exc.value.args[0] == params["message"]
+
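+ # validate_vlans resolves VLAN names to IDs via get_vlan_name_id_map and returns the per-port
+ # untagged and tagged dictionaries that are later used to build the payload.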
+ def test_validate_vlans(
+ self, mocker, ome_connection_mock_for_template_network_vlan):
+ f_module = self.get_module_mock(params={
+ "tagged_networks": [{"port": 1, "tagged_network_ids": [1, 2]}, {"port": 2, "tagged_network_names": []},
+ {"port": 3, "tagged_network_names": ["bronze"]}],
+ "untagged_networks": [{"port": 1, "untagged_network_name": "plat"}, {"port": 2, "untagged_network_id": 0},
+ {"port": 3, "untagged_network_id": 4}]})
+ mocker.patch(MODULE_PATH + 'get_vlan_name_id_map',
+ return_value={"vlan1": 1, "vlan2": 2, "gold": 3, "silver": 4, "plat": 5, "bronze": 6})
+ untag_dict, tagged_dict = self.module.validate_vlans(
+ f_module, ome_connection_mock_for_template_network_vlan)
+ assert untag_dict == {1: 5, 2: 0, 3: 4}
+ assert tagged_dict == {1: [1, 2], 2: [], 3: [6]}
+
+ @pytest.mark.parametrize("params", [
+ {"inp": {"untagged_networks": [{"port": 2, "untagged_network_name": "plat"},
+ {"port": 2, "untagged_network_id": 0}]},
+ "msg": "port 2 is repeated for untagged_network_id"},
+ {"inp": {"tagged_networks": [{"port": 1, "tagged_network_ids": [1, 7]}, {"port": 2, "tagged_network_names": []},
+ {"port": 3, "tagged_network_names": ["bronze"]}]},
+ "msg": "7 is not a valid vlan id port 1"},
+ {"inp": {"tagged_networks": [{"port": 1, "tagged_network_ids": []},
+ {"port": 3, "tagged_network_names": ["bronzy"]}]},
+ "msg": "bronzy is not a valid vlan name port 3"},
+ {"inp": {"untagged_networks": [{"port": 2, "untagged_network_name": "platy"},
+ {"port": 3, "untagged_network_id": 0}]},
+ "msg": "platy is not a valid vlan name for port 2"},
+ {"inp": {"untagged_networks": [{"port": 2, "untagged_network_name": "plat"},
+ {"port": 1, "untagged_network_id": 7}]},
+ "msg": "untagged_network_id: 7 is not a valid vlan id for port 1"},
+ {"inp": {"tagged_networks": [{"port": 1, "tagged_network_ids": [1]}],
+ "untagged_networks": [{"port": 1, "untagged_network_id": 1}]},
+ "msg": "vlan 1('vlan1') cannot be in both tagged and untagged list for port 1"}])
+ def test_validate_vlans_failure(
+ self, params, mocker, ome_connection_mock_for_template_network_vlan):
+ f_module = self.get_module_mock(params["inp"])
+ mocker.patch(MODULE_PATH + 'get_vlan_name_id_map',
+ return_value={"vlan1": 1, "vlan2": 2, "gold": 3, "silver": 4, "plat": 5, "bronze": 6})
+ with pytest.raises(Exception) as exc:
+ self.module.validate_vlans(
+ f_module, ome_connection_mock_for_template_network_vlan)
+ assert exc.value.args[0] == params["msg"]
+
+ @pytest.mark.parametrize("modify_setting_payload",
+ [{"Description": "Identity pool with ethernet and fcoe settings2"}, {"Name": "pool2"},
+ {"EthernetSettings": {
+ "Mac": {"IdentityCount": 61, "StartingMacAddress": "UFBQUFAA"}}},
+ {"Description": "Identity pool with ethernet and fcoe settings2",
+ "EthernetSettings": {"Mac": {"IdentityCount": 60, "StartingMacAddress": "UFBQUFAA"}},
+ "FcoeSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "cHBwcHAA"}}}])
+ def test_compare_payload_attributes_case_false(
+ self, modify_setting_payload):
+ existing_setting_payload = {"@odata.context": "/api/$metadata#IdentityPoolService.IdentityPool",
+ "@odata.type": "#IdentityPoolService.IdentityPool",
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(23)", "Id": 23,
+ "Name": "pool1", "Description": "Identity pool with ethernet and fcoe settings1",
+ "CreatedBy": "admin", "CreationTime": "2020-01-31 09:28:16.491424",
+ "LastUpdatedBy": "admin", "LastUpdateTime": "2020-01-31 09:49:59.012549",
+ "EthernetSettings": {
+ "Mac": {"IdentityCount": 60, "StartingMacAddress": "UFBQUFAA"}},
+ "IscsiSettings": None,
+ "FcoeSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "cHBwcHAA"}},
+ "FcSettings": None, "UsageCounts": {
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(23)/UsageCounts"},
+ "UsageIdentitySets@odata.navigationLink": "/api/IdentityPoolService/IdentityPools(23)/UsageIdentitySets"}
+ val = self.module.compare_nested_dict(
+ modify_setting_payload, existing_setting_payload)
+ assert val is False
+
+ @pytest.mark.parametrize("vlan_payload",
+ [{"Name": "pool1", "EthernetSettings": {"Mac": {"StartingMacAddress": "qrvM3e6q"}}},
+ {"Name": "pool1", "EthernetSettings": {
+ "Mac": {"IdentityCount": 70}}},
+ {"Description": "Identity pool with ethernet setting",
+ "EthernetSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "qrvM3e6q"}},
+ "FcoeSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "cHBwcHAA"}}}])
+ def test_compare_payload_attributes_case_true(self, vlan_payload):
+ """setting values are same as existing payload and no need to apply the changes again"""
+ existing_setting_payload = {"@odata.context": "/api/$metadata#IdentityPoolService.IdentityPool",
+ "@odata.type": "#IdentityPoolService.IdentityPool",
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)", "Id": 30,
+ "Name": "pool1", "Description": "Identity pool with ethernet setting",
+ "CreatedBy": "admin", "CreationTime": "2020-01-31 11:31:13.621182",
+ "LastUpdatedBy": "admin", "LastUpdateTime": "2020-01-31 11:34:28.00876",
+ "EthernetSettings": {
+ "Mac": {"IdentityCount": 70, "StartingMacAddress": "qrvM3e6q"}},
+ "IscsiSettings": None,
+ "FcoeSettings": {"Mac": {"IdentityCount": 70, "StartingMacAddress": "cHBwcHAA"}},
+ "FcSettings": None, "UsageCounts": {
+ "@odata.id": "/api/IdentityPoolService/IdentityPools(30)/UsageCounts"},
+ "UsageIdentitySets@odata.navigationLink": "/api/IdentityPoolService/IdentityPools(30)/UsageIdentitySets"}
+ val = self.module.compare_nested_dict(
+ vlan_payload, existing_setting_payload)
+ assert val is True
+
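+ # In check mode the module only reports pending changes (CHANGES_FOUND) without applying them.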
+ @pytest.mark.parametrize("params", [{"module_args": {"template_name": "vlan_name", "nic_identifier": "NIC1",
+ "untagged_networks": [
+ {"port": 1, "untagged_network_name": "v1"}]},
+ "untag_dict": {"1": 13, "2": 14, "3": 11, "4": 12},
+ "tagged_dict": {"1": [10720], "2": [10719]},
+ "port_id_map": {"1": 13, "2": 14, "3": 11, "4": 12},
+ "port_untagged_map": {"1": 10719, "2": 10720, "3": 0, "4": 0},
+ "port_tagged_map": {"1": [10720], "2": [10719], "3": [], "4": []},
+ "port_nic_bond_map": {"1": "false", "2": "false", "3": "false", "4": "false"},
+ "nic_bonding_tech": True, "check_mode": True, "msg": CHANGES_FOUND}])
+ def test_ome_template_network_vlan_check_mode(self, params, ome_connection_mock_for_template_network_vlan,
+ ome_response_mock, ome_default_args, mocker):
+ mocker.patch(
+ MODULE_PATH + 'validate_vlans',
+ return_value=(
+ params.get("untag_dict"),
+ params.get("tagged_dict")))
+ mocker.patch(MODULE_PATH + 'get_template_details',
+ return_value={"Name": "vlan_name", "Id": 12, "IdentityPoolId": 23})
+ mocker.patch(MODULE_PATH + 'get_template_vlan_info', return_value=(
+ params.get("port_id_map"), params.get(
+ "port_untagged_map"), params.get("port_tagged_map"),
+ params.get("port_nic_bond_map"), params.get("nic_bonding_tech")))
+ ome_default_args.update(params.get('module_args'))
+ result = self._run_module(
+ ome_default_args, check_mode=params.get(
+ 'check_mode', False))
+ assert result['msg'] == params['msg']
+
+ @pytest.mark.parametrize("params", [
+ {"fail_json": True, "json_data": {"JobId": 1234},
+ "get_vlan_name_id_map": {"v1": 1},
+ "mparams": {"template_name": "vlan_name", "nic_identifier": "NIC1",
+ "untagged_networks": [{"port": 1, "untagged_network_name": "v1"},
+ {"port": 1, "untagged_network_name": "v1"}]},
+ 'message': "port 1 is repeated for untagged_network_name", "success": True
+ },
+ {"fail_json": True, "json_data": {"JobId": 1234},
+ "get_vlan_name_id_map": {"v1": 1, "v2": 2},
+ "mparams": {"template_name": "vlan_name", "nic_identifier": "NIC1",
+ "untagged_networks": [{"port": 1, "untagged_network_name": "v1"},
+ {"port": 2, "untagged_network_name": "v2"}],
+ "tagged_networks": [{"port": 3, "tagged_network_names": ['bronzy']}]},
+ 'message': "bronzy is not a valid vlan name port 3", "success": True
+ }
+ ])
+ def test_main(self, params, ome_connection_mock_for_template_network_vlan, ome_default_args, ome_response_mock, mocker):
+ mocker.patch(MODULE_PATH + 'get_vlan_name_id_map', return_value=params.get("get_vlan_name_id_map"))
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params.get("json_data")
+ ome_default_args.update(params.get('mparams'))
+ if params.get("fail_json", False):
+ result = self._run_module_with_fail_json(ome_default_args)
+ else:
+ result = self._run_module(ome_default_args, check_mode=params.get("check_mode", False))
+ assert result["msg"] == params['message']
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_template_network_vlan_main_success_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_template_network_vlan,
+ ome_response_mock):
+ ome_default_args.update({"nic_identifier": "NIC1", "template_id": 123, "tagged_networks": [
+ {"port": 2, "tagged_network_ids": [22763], "tagged_network_names": ["gold", "silver"]}]})
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(
+ MODULE_PATH + 'validate_vlans',
+ side_effect=exc_type("TEST"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(
+ MODULE_PATH + 'validate_vlans',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'validate_vlans',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'proxy_configuration' not in result
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user.py
new file mode 100644
index 00000000..ac3c1814
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user.py
@@ -0,0 +1,191 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 4.0.0
+# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import pytest
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_user
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \
+ AnsibleFailJSonException
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def ome_connection_for_user(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_user.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeUser(FakeAnsibleModule):
+ module = ome_user
+
+ def test__validate_inputs_fail_case(self, ome_connection_for_user):
+ f_module = self.get_module_mock(params={"state": "absent", "user_id": None})
+ with pytest.raises(Exception) as exc:
+ self.module._validate_inputs(f_module)
+ assert exc.value.args[0] == "One of the following 'user_id' or 'name' " \
+ "option is required for state 'absent'"
+
+ def test__validate_inputs_user_pass_case(self, mocker):
+ f_module = self.get_module_mock(params={"state": "absent", "user_id": 123})
+ fail_module_mock = mocker.patch(MODULE_PATH + 'ome_user.fail_module')
+ self.module._validate_inputs(f_module)
+ fail_module_mock.assert_not_called()
+
+ def test_get_user_id_from_name(self, ome_response_mock, ome_connection_for_user):
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {'value': [{"UserName": "Testname", "Id": 24}]}
+ ome_response_mock.status_code = 200
+ data = self.module.get_user_id_from_name(ome_connection_for_user, "Testname")
+ assert data == 24
+
+ def test_get_user_id_from_name01(self, ome_response_mock, ome_connection_for_user):
+ ome_response_mock.success = True
+ val = None
+ ome_response_mock.json_data = {'value': [{"UserName": "Testname", "Id": 24}]}
+ ome_response_mock.status_code = 200
+ data = self.module.get_user_id_from_name(ome_connection_for_user, "Test")
+ assert data == val
+
+ def test_get_user_id_from_name_case02(self, ome_connection_for_user):
+ val = None
+ data = self.module.get_user_id_from_name(ome_connection_for_user, None)
+ assert data == val
+
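+ # _get_resource_parameters maps the module state to the REST call: 'present' with an existing
+ # account becomes PUT, 'present' without one becomes POST, and 'absent' becomes DELETE.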
+ def test__get_resource_parameters_present_success_case01(self, ome_response_mock, ome_connection_for_user, mocker):
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {'value': []}
+ f_module = self.get_module_mock(params={"state": "present",
+ "user_id": 23,
+ "attributes": {"UserName": "user1", "Password": "UserPassword",
+ "RoleId": "10", "Enabled": True}})
+ mocker.patch(MODULE_PATH + 'ome_user.get_user_id_from_name', return_value=23)
+ data = self.module._get_resource_parameters(f_module, ome_response_mock)
+ assert data == ('PUT', "AccountService/Accounts('23')",
+ {'Enabled': True, 'Id': 23, 'Password': 'UserPassword', 'RoleId': '10', 'UserName': 'user1'})
+
+ def test__get_resource_parameters_absent_success_case02(self, ome_response_mock, mocker, ome_connection_for_user,
+ ome_default_args):
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {'value': []}
+ f_module = self.get_module_mock(params={"state": "absent", "user_id": 23})
+ mocker.patch(MODULE_PATH + 'ome_user.get_user_id_from_name', return_value=23)
+ data = self.module._get_resource_parameters(f_module, ome_response_mock)
+ assert data == ('DELETE', "AccountService/Accounts('23')", None)
+
+ def test__get_resource_parameters_case03(self, ome_response_mock, mocker, ome_default_args):
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {'value': []}
+ f_module = self.get_module_mock(params={"state": "present",
+ "user_id": None,
+ "attributes": {"UserName": "user1", "Password": "UserPassword",
+ "RoleId": "10", "Enabled": True}})
+ mocker.patch(MODULE_PATH + 'ome_user.get_user_id_from_name', return_value=None)
+ data = self.module._get_resource_parameters(f_module, ome_response_mock)
+ assert data == ('POST', "AccountService/Accounts",
+ {'Enabled': True, 'Password': 'UserPassword', 'RoleId': '10', 'UserName': 'user1'})
+
+ def test__get_resource_parameters_fail_case(self, ome_response_mock, mocker):
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {'value': []}
+ f_module = self.get_module_mock(params={"state": "absent", "user_id": None})
+ mocker.patch(MODULE_PATH + 'ome_user.get_user_id_from_name', return_value=None)
+ with pytest.raises(Exception) as exc:
+ self.module._get_resource_parameters(f_module, ome_response_mock)
+ assert exc.value.args[0] == "Unable to get the account because the specified account " \
+ "does not exist in the system."
+
+ def test__get_resource_parameters_fail_case_02(self, ome_response_mock, mocker):
+ fail_module_mock = mocker.patch(MODULE_PATH + 'ome_user.fail_module')
+ f_module = self.get_module_mock(params={"state": "absent", "user_id": None})
+ mocker.patch(MODULE_PATH + 'ome_user.get_user_id_from_name', return_value=None)
+ res = self.module._get_resource_parameters(f_module, ome_response_mock)
+ assert (res[0], res[1], res[2]) == ('DELETE', "AccountService/Accounts('None')", None)
+ # fail_module is patched out here, so _get_resource_parameters proceeds past the failure path;
+ # verify that the stub was actually invoked instead of asserting on the unbound method object.
+ fail_module_mock.assert_called()
+
+ def test_main_user_success_case01(self, ome_default_args, mocker, ome_connection_for_user, ome_response_mock):
+ ome_default_args.update({"state": "absent", "user_id": 23})
+ mocker.patch(MODULE_PATH + 'ome_user._validate_inputs')
+ mocker.patch(MODULE_PATH + 'ome_user._get_resource_parameters',
+ return_value=["DELETE", "ACCOUNT_RESOURCE", {"user_id": 23}])
+ result = self._run_module(ome_default_args)
+ message_success = [
+ "Successfully deleted the User", "Successfully modified a User", "Successfully created a User"]
+ assert result['changed'] is True
+ assert result['msg'] in message_success
+
+ def test_main_user_success_case02(self, ome_default_args, mocker, ome_connection_for_user, ome_response_mock):
+ ome_default_args.update({"state": "present",
+ "user_id": 23,
+ "attributes": {"UserName": "user1", "Password": "UserPassword",
+ "RoleId": "10", "Enabled": True}})
+ mocker.patch(MODULE_PATH + 'ome_user._validate_inputs')
+ mocker.patch(MODULE_PATH + 'ome_user._get_resource_parameters',
+ return_value=["PUT", "ACCOUNT_RESOURCE", {"user_id": 23}])
+ result = self._run_module(ome_default_args)
+ message_success = [
+ "Successfully deleted the User", "Successfully modified a User", "Successfully created a User"]
+ assert result['changed'] is True
+ assert result['msg'] in message_success
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_main_user_exception_case(self, exc_type, mocker, ome_default_args, ome_connection_for_user,
+ ome_response_mock):
+ ome_default_args.update({"state": "present",
+ "user_id": 23,
+ "attributes": {"UserName": "user1", "Password": "UserPassword",
+ "RoleId": "10", "Enabled": True}})
+ mocker.patch(MODULE_PATH + 'ome_user._validate_inputs')
+ mocker.patch(
+ MODULE_PATH + 'ome_user._get_resource_parameters', return_value=("method",
+ "path",
+ "payload"))
+ ome_response_mock.json_data = {"value": []}
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ ome_connection_for_user.invoke_request.side_effect = exc_type('test')
+ else:
+ mocker.patch(
+ MODULE_PATH + 'ome_user._get_resource_parameters',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert 'msg' in result
+ assert result['failed'] is True
+
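+ # exit_module reports a message keyed by the HTTP method used; the mocked module raises on
+ # exit_json/fail_json, so the expected success message is read from the raised exception.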
+ @pytest.mark.parametrize("http_method, status_code", [('POST', 200), ('PUT', 200), ('DELETE', 204)])
+ def test_exit_module_user_success_case(self, http_method, status_code, ome_response_mock):
+ ome_response_mock.status_code = status_code
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {'value': []}
+ f_module = self.get_module_mock()
+ msg_dict = {'POST': "Successfully created a User",
+ 'PUT': "Successfully modified a User",
+ 'DELETE': "Successfully deleted the User"}
+ with pytest.raises(Exception) as exc:
+ self.module.exit_module(f_module, ome_response_mock, http_method)
+ assert exc.value.args[0] == msg_dict[http_method]
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user_info.py
new file mode 100644
index 00000000..6d48cc18
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user_info.py
@@ -0,0 +1,99 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 2.1.1
+# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_user_info
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def ome_connection_user_info_mock(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_user_info.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeUserInfo(FakeAnsibleModule):
+ module = ome_user_info
+
+ @pytest.mark.parametrize("module_params,data", [({"system_query_options": {"filter": "abc"}}, "$filter")])
+ def test_user_get_query_parameters(self, module_params, data, ome_connection_user_info_mock):
+ res = self.module._get_query_parameters(module_params)
+ if data is not None:
+ assert data in res
+ else:
+ assert res is None
+
+ def test_user_info_main_success_case_all(self, ome_default_args, ome_connection_user_info_mock, ome_response_mock):
+ ome_response_mock.json_data = {"value": [{"account_id": 1,
+ "system_query_options": "the user based on UserName"}]}
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert 'user_info' in result
+
+ def test_user_info_main_success_case_account_id(self, ome_default_args, ome_connection_user_info_mock,
+ ome_response_mock):
+ ome_default_args.update({"account_id": 1})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [{"account_id": 1}]}
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'user_info' in result
+
+ def test_user_info_success_case03(self, ome_default_args, ome_connection_user_info_mock, ome_response_mock, mocker):
+ mocker.patch(MODULE_PATH + 'ome_user_info._get_query_parameters',
+ return_value={"filter": "abc"})
+ ome_default_args.update({"system_query_options": {"filter": "abc"}})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [{"filter": "abc"}]}
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'user_info' in result
+
+ def test_get_user_info_failure_case(self, ome_default_args, ome_connection_user_info_mock, ome_response_mock):
+ ome_response_mock.status_code = 500
+ ome_response_mock.success = False
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == 'Unable to retrieve the account details.'
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_ome_user_info_main_exception_handling_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_user_info_mock, ome_response_mock):
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ ome_connection_user_info_mock.invoke_request.side_effect = exc_type('test')
+ else:
+ ome_connection_user_info_mock.invoke_request.side_effect = exc_type('http://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ if exc_type != URLError:
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(ome_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_event_subscription.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_event_subscription.py
new file mode 100644
index 00000000..075406a7
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_event_subscription.py
@@ -0,0 +1,452 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 4.1.0
+# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible_collections.dellemc.openmanage.plugins.modules import redfish_event_subscription
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+DESTINATION_INVALID = "The Parameter destination must have an HTTPS destination. The HTTP destination is not allowed"
+SUBSCRIPTION_EXISTS = "No changes found to be applied."
+SUBSCRIPTION_DELETED = "Successfully deleted the subscription."
+SUBSCRIPTION_UNABLE_DEL = "Unable to delete the subscription."
+SUBSCRIPTION_UNABLE_ADD = "Unable to add a subscription."
+SUBSCRIPTION_ADDED = "Successfully added the subscription."
+DESTINATION_MISMATCH = "No changes found to be applied."
+EVENT_TYPE_INVALID = "value of event_type must be one of: Alert, MetricReport, got: Metricreport"
+
+
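+# This fixture patches the Redfish client inside the module so invoke_request returns the shared
+# redfish_response_mock, mirroring the RestOME connection fixtures used for the OME tests.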
+@pytest.fixture
+def redfish_connection_mock(mocker, redfish_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'redfish_event_subscription.Redfish')
+ redfish_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ redfish_connection_mock_obj.invoke_request.return_value = redfish_response_mock
+ return redfish_connection_mock_obj
+
+
+class TestRedfishSubscription(FakeAnsibleModule):
+ module = redfish_event_subscription
+
+ @pytest.mark.parametrize("val", [{"destination": "https://192.168.1.100:8188"},
+ {"destination": "https://192.168.1.100:8189"}])
+ def test_function_get_subscription_success(self, mocker, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args, val):
+ redfish_default_args.update({"state": "absent"})
+ redfish_default_args.update({"destination": val["destination"]})
+ redfish_default_args.update({"event_type": "MetricReport"})
+ redfish_default_args.update({"event_format_type": "MetricReport"})
+ json_data1 = {
+ "@odata.context": "/redfish/v1/$metadata#EventDestination.EventDestination",
+ "@odata.id": "/redfish/v1/EventService/Subscriptions/c7e5c3fc-8204-11eb-bd10-2cea7ff7fe80",
+ "@odata.type": "#EventDestination.v1_6_0.EventDestination",
+ "Context": "RedfishEvent",
+ "DeliveryRetryPolicy": "RetryForever",
+ "Description": "Event Subscription Details",
+ "Destination": "https://192.168.1.100:8189",
+ "EventFormatType": "Event",
+ "EventTypes": [
+ "Alert"
+ ],
+ "EventTypes@odata.count": 1,
+ "HttpHeaders": [],
+ "HttpHeaders@odata.count": 0,
+ "Id": "c7e5c3fc-8204-11eb-bd10-2cea7ff7fe80",
+ "MetricReportDefinitions": [],
+ "MetricReportDefinitions@odata.count": 0,
+ "Name": "EventSubscription c7e5c3fc-8204-11eb-bd10-2cea7ff7fe80",
+ "OriginResources": [],
+ "OriginResources@odata.count": 0,
+ "Protocol": "Redfish",
+ "Status": {
+ "Health": "OK",
+ "HealthRollup": "OK",
+ "State": "Enabled"
+ },
+ "SubscriptionType": "RedfishEvent"
+ }
+ json_data2 = {
+ "@odata.context": "/redfish/v1/$metadata#EventDestination.EventDestination",
+ "@odata.id": "/redfish/v1/EventService/Subscriptions/c6ff37fc-8204-11eb-b08f-2cea7ff7fe80",
+ "@odata.type": "#EventDestination.v1_6_0.EventDestination",
+ "Context": "RedfishEvent",
+ "DeliveryRetryPolicy": "RetryForever",
+ "Description": "Event Subscription Details",
+ "Destination": "https://192.168.1.100:8188",
+ "EventFormatType": "MetricReport",
+ "EventTypes": [
+ "MetricReport"
+ ],
+ "EventTypes@odata.count": 1,
+ "HttpHeaders": [],
+ "HttpHeaders@odata.count": 0,
+ "Id": "c6ff37fc-8204-11eb-b08f-2cea7ff7fe80",
+ "MetricReportDefinitions": [],
+ "MetricReportDefinitions@odata.count": 0,
+ "Name": "EventSubscription c6ff37fc-8204-11eb-b08f-2cea7ff7fe80",
+ "OriginResources": [],
+ "OriginResources@odata.count": 0,
+ "Protocol": "Redfish",
+ "Status": {
+ "Health": "OK",
+ "HealthRollup": "OK",
+ "State": "Enabled"
+ },
+ "SubscriptionType": "RedfishEvent"
+ }
+
+ mocker.patch(MODULE_PATH + 'redfish_event_subscription.get_subscription_details',
+ side_effect=[json_data1, json_data2])
+
+ redfish_response_mock.json_data = {
+ "@odata.context": "/redfish/v1/$metadata#EventDestinationCollection.EventDestinationCollection",
+ "@odata.id": "/redfish/v1/EventService/Subscriptions",
+ "@odata.type": "#EventDestinationCollection.EventDestinationCollection",
+ "Description": "List of Event subscriptions",
+ "Members": [
+ {
+ "@odata.id": "/redfish/v1/EventService/Subscriptions/c6ff37fc-8204-11eb-b08f-2cea7ff7fe80"
+ },
+ {
+ "@odata.id": "/redfish/v1/EventService/Subscriptions/c7e5c3fc-8204-11eb-bd10-2cea7ff7fe80"
+ }
+ ],
+ "Members@odata.count": 2,
+ "Name": "Event Subscriptions Collection"
+ }
+ redfish_response_mock.success = True
+ f_module = self.get_module_mock(params=redfish_default_args)
+ result = self.module.get_subscription(redfish_connection_mock, val["destination"])
+ assert result["Destination"] == val["destination"]
+
+ @pytest.mark.parametrize("val", [
+ {"destination": "https://192.168.1.100:8188", "event_type": "MetricReport",
+ "event_format_type": "MetricReport"},
+ {"destination": "https://192.168.1.100:8188", "event_type": "Alert", "event_format_type": "Event"}])
+ def test_function_create_subscription(self, mocker, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args, val):
+ redfish_default_args.update({"state": "absent"})
+ redfish_default_args.update({"destination": val["destination"]})
+ redfish_default_args.update({"event_type": val["event_type"]})
+ redfish_default_args.update({"event_format_type": val["event_format_type"]})
+
+ redfish_response_mock.json_data = {
+ "Id": "c6ff37fc-8204-11eb-b08f-2cea7ff7fe80",
+ "Destination": val["destination"],
+ "EventFormatType": val["event_format_type"],
+ "Context": "RedfishEvent",
+ "Protocol": "Redfish",
+ "EventTypes": [val["event_type"]],
+ "SubscriptionType": "RedfishEvent"
+ }
+ redfish_response_mock.success = True
+ f_module = self.get_module_mock(params=redfish_default_args)
+ result = self.module.create_subscription(redfish_connection_mock, f_module)
+ assert result.json_data["Destination"] == val["destination"]
+ assert result.json_data["EventFormatType"] == val["event_format_type"]
+ assert result.json_data["EventTypes"] == [val["event_type"]]
+
+ @pytest.mark.parametrize("val", [
+ {"destination": "https://100.96.80.1:161", "event_type": "MetricReport",
+ "event_format_type": "MetricReport"},
+ {"destination": "https://100.96.80.1:161", "event_type": "Alert", "event_format_type": "Event"}])
+ def test_function_get_subscription_details(self, mocker, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args, val):
+ redfish_default_args.update({"state": "absent"})
+ redfish_default_args.update({"destination": val["destination"]})
+ redfish_default_args.update({"event_type": val["event_type"]})
+ redfish_default_args.update({"event_format_type": val["event_format_type"]})
+
+ redfish_response_mock.json_data = {
+ "@odata.context": "/redfish/v1/$metadata#EventDestination.EventDestination",
+ "@odata.id": "/redfish/v1/EventService/Subscriptions/087b9026-0afa-11ec-8120-4cd98f5fc5a6",
+ "@odata.type": "#EventDestination.v1_9_0.EventDestination",
+ "Actions": {
+ "#EventDestination.ResumeSubscription": {
+ "target": "/redfish/v1/EventService/Subscriptions/087b9026-0afa-11ec-8120-4cd98f5fc5a6/Actions/EventDestination.ResumeSubscription"
+ }
+ },
+ "Context": "RedfishEvent",
+ "DeliveryRetryPolicy": "RetryForever",
+ "Description": "Event Subscription Details",
+ "Destination": val['destination'],
+ "EventFormatType": val["event_format_type"],
+ "EventTypes": [val["event_type"]],
+ "EventTypes@odata.count": 1,
+ "HttpHeaders": [],
+ "HttpHeaders@odata.count": 0,
+ "Id": "087b9026-0afa-11ec-8120-4cd98f5fc5a6",
+ "Name": "EventSubscription 087b9026-0afa-11ec-8120-4cd98f5fc5a6",
+ "Protocol": "Redfish",
+ "Status": {
+ "Health": "OK",
+ "HealthRollup": "OK",
+ "State": "Enabled"
+ },
+ "SubscriptionType": "RedfishEvent"
+ }
+ redfish_response_mock.success = True
+ result = self.module.get_subscription_details(redfish_connection_mock, "c6ff37fc-8204-11eb-b08f-2cea7ff7fe80")
+ assert result["Destination"] == val["destination"]
+ assert result["EventFormatType"] == val["event_format_type"]
+ assert result["EventTypes"] == [val["event_type"]]
+
+ @pytest.mark.parametrize("val", [
+ {"destination": "https://100.96.80.1:161", "event_type": "MetricReport",
+ "event_format_type": "MetricReport"},
+ {"destination": "https://100.96.80.1:161", "event_type": "Alert", "event_format_type": "Event"}])
+ def test_function_get_subscription_details_None(self, mocker, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args, val):
+ redfish_default_args.update({"state": "absent"})
+ redfish_default_args.update({"destination": val["destination"]})
+ redfish_default_args.update({"event_type": val["event_type"]})
+ redfish_default_args.update({"event_format_type": val["event_format_type"]})
+
+ redfish_response_mock.json_data = {
+ "@odata.context": "/redfish/v1/$metadata#EventDestination.EventDestination",
+ "@odata.id": "/redfish/v1/EventService/Subscriptions/087b9026-0afa-11ec-8120-4cd98f5fc5a6",
+ "@odata.type": "#EventDestination.v1_9_0.EventDestination",
+ "Actions": {
+ "#EventDestination.ResumeSubscription": {
+ "target": "/redfish/v1/EventService/Subscriptions/087b9026-0afa-11ec-8120-4cd98f5fc5a6/Actions/EventDestination.ResumeSubscription"
+ }
+ },
+ "Context": "RedfishEvent",
+ "DeliveryRetryPolicy": "RetryForever",
+ "Description": "Event Subscription Details",
+ "Destination": val['destination'],
+ "EventFormatType": val["event_format_type"],
+ "EventTypes": [val["event_type"]],
+ "EventTypes@odata.count": 1,
+ "HttpHeaders": [],
+ "HttpHeaders@odata.count": 0,
+ "Id": "087b9026-0afa-11ec-8120-4cd98f5fc5a6",
+ "Name": "EventSubscription 087b9026-0afa-11ec-8120-4cd98f5fc5a6",
+ "Protocol": "Redfish",
+ "Status": {
+ "Health": "OK",
+ "HealthRollup": "OK",
+ "State": "Enabled"
+ },
+ "SubscriptionType": "RedfishEvent"
+ }
+ redfish_response_mock.success = False
+ result = self.module.get_subscription_details(redfish_connection_mock, "c6ff37fc-8204-11eb-b08f-2cea7ff7fe80")
+ assert result is None
+
+ @pytest.mark.parametrize("val", [
+ {"destination": "https://100.96.80.1:161"},
+ {"destination": "https://100.96.80.1:161"}])
+ def test_function_delete_subscription(self, mocker, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args, val):
+ redfish_default_args.update({"state": "absent"})
+ redfish_default_args.update({"destination": val["destination"]})
+
+ redfish_response_mock.json_data = {
+ "@Message.ExtendedInfo": [
+ {
+ "Message": "Successfully Completed Request",
+ "MessageArgs": [],
+ "MessageArgs@odata.count": 0,
+ "MessageId": "Base.1.7.Success",
+ "RelatedProperties": [],
+ "RelatedProperties@odata.count": 0,
+ "Resolution": "None",
+ "Severity": "OK"
+ },
+ {
+ "Message": "The operation successfully completed.",
+ "MessageArgs": [],
+ "MessageArgs@odata.count": 0,
+ "MessageId": "IDRAC.2.4.SYS413",
+ "RelatedProperties": [],
+ "RelatedProperties@odata.count": 0,
+ "Resolution": "No response action is required.",
+ "Severity": "Informational"
+ }
+ ]
+ }
+ redfish_response_mock.success = True
+ result = self.module.delete_subscription(redfish_connection_mock, "c6ff37fc-8204-11eb-b08f-2cea7ff7fe80")
+ assert result.json_data["@Message.ExtendedInfo"][0]["Message"] == "Successfully Completed Request"
+ assert result.json_data["@Message.ExtendedInfo"][1]["Message"] == "The operation successfully completed."
+
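+ # Plain HTTP destinations are rejected up front; only HTTPS destinations are accepted.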
+ def test_module_validation_input_params(self, mocker, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args):
+ redfish_default_args.update({"state": "absent"})
+ redfish_default_args.update({"destination": "http://192.168.1.100:8188"})
+ redfish_default_args.update({"event_type": "MetricReport"})
+ redfish_default_args.update({"event_format_type": "MetricReport"})
+ with pytest.raises(Exception) as err:
+ self._run_module(redfish_default_args)
+ assert err.value.args[0]['msg'] == DESTINATION_INVALID
+
+ def test_module_absent_does_not_exist(self, mocker, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args):
+ redfish_default_args.update({"state": "absent"})
+ redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"event_type": "MetricReport"})
+ redfish_default_args.update({"event_format_type": "MetricReport"})
+
+ mocker.patch(
+ MODULE_PATH + 'redfish_event_subscription.get_subscription', return_value=None)
+ redfish_response_mock.success = True
+ result = self._run_module(redfish_default_args)
+ assert result["msg"] == DESTINATION_MISMATCH
+
+ def test_module_absent_does_exist(self, mocker, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args):
+ redfish_default_args.update({"state": "absent"})
+ redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"event_type": "MetricReport"})
+ redfish_default_args.update({"event_format_type": "MetricReport"})
+
+ json_data = {
+ "Id": "c6ff37fc-8204-11eb-b08f-2cea7ff7fe80",
+ "Destination": "https://192.168.1.100:8188",
+ "EventFormatType": "MetricReport",
+ "Context": "RedfishEvent",
+ "Protocol": "Redfish",
+ "EventTypes": ["MetricReport"],
+ "SubscriptionType": "RedfishEvent"
+ }
+ redfish_response_mock.success = True
+ mocker.patch(MODULE_PATH + 'redfish_event_subscription.get_subscription', return_value=json_data)
+ mocker.patch(MODULE_PATH + 'redfish_event_subscription.delete_subscription', return_value=redfish_response_mock)
+ f_module = self.get_module_mock()
+ result = self._run_module(redfish_default_args)
+ print(result)
+ assert result["msg"] == SUBSCRIPTION_DELETED
+
+ def test_module_absent_does_exist_error(self, mocker, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args):
+ redfish_default_args.update({"state": "absent"})
+ redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"event_type": "MetricReport"})
+ redfish_default_args.update({"event_format_type": "MetricReport"})
+
+ json_data = {
+ "Id": "c6ff37fc-8204-11eb-b08f-2cea7ff7fe80",
+ "Destination": "https://192.168.1.100:8188",
+ "EventFormatType": "MetricReport",
+ "Context": "RedfishEvent",
+ "Protocol": "Redfish",
+ "EventTypes": ["MetricReport"],
+ "SubscriptionType": "RedfishEvent"
+ }
+ redfish_response_mock.success = False
+ mocker.patch(MODULE_PATH + 'redfish_event_subscription.get_subscription', return_value=json_data)
+ mocker.patch(MODULE_PATH + 'redfish_event_subscription.delete_subscription', return_value=redfish_response_mock)
+ with pytest.raises(Exception) as err:
+ self._run_module(redfish_default_args)
+ assert err.value.args[0]['msg'] == SUBSCRIPTION_UNABLE_DEL
+
+ def test_module_present_does_not_exist(self, mocker, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args):
+ redfish_default_args.update({"state": "present"})
+ redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"event_type": "MetricReport"})
+ redfish_default_args.update({"event_format_type": "MetricReport"})
+
+ json_data = {
+ "Destination": "https://192.168.1.100:8188",
+ "EventFormatType": "MetricReport",
+ "Context": "RedfishEvent",
+ "Protocol": "Redfish",
+ "EventTypes": ["MetricReport"],
+ "SubscriptionType": "RedfishEvent"
+ }
+ mocker.patch(MODULE_PATH + 'redfish_event_subscription.get_subscription', return_value=None)
+ create_subscription_response_mock = redfish_response_mock
+ create_subscription_response_mock.json_data = json_data
+ mocker.patch(MODULE_PATH + 'redfish_event_subscription.create_subscription',
+ return_value=create_subscription_response_mock)
+ f_module = self.get_module_mock()
+ redfish_response_mock.success = True
+ result = self._run_module(redfish_default_args)
+ print(result)
+ assert result["msg"] == SUBSCRIPTION_ADDED
+
+ def test_module_present_does_not_exist_error(self, mocker, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args):
+ redfish_default_args.update({"state": "present"})
+ redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"event_type": "MetricReport"})
+ redfish_default_args.update({"event_format_type": "MetricReport"})
+
+ json_data = {
+ "Destination": "https://192.168.1.100:8188",
+ "EventFormatType": "MetricReport",
+ "Context": "RedfishEvent",
+ "Protocol": "Redfish",
+ "EventTypes": ["MetricReport"],
+ "SubscriptionType": "RedfishEvent"
+ }
+ mocker.patch(MODULE_PATH + 'redfish_event_subscription.get_subscription', return_value=None)
+ create_subscription_response_mock = redfish_response_mock
+ create_subscription_response_mock.json_data = json_data
+ mocker.patch(MODULE_PATH + 'redfish_event_subscription.create_subscription',
+ return_value=create_subscription_response_mock)
+ redfish_response_mock.success = False
+ with pytest.raises(Exception) as err:
+ self._run_module(redfish_default_args)
+ assert err.value.args[0]['msg'] == SUBSCRIPTION_UNABLE_ADD
+
+ def test_module_present_does_not_exist_error_wrong_input(self, mocker, redfish_connection_mock,
+ redfish_response_mock,
+ redfish_default_args):
+ redfish_default_args.update({"state": "present"})
+ redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"event_type": "Metricreport"})
+ redfish_default_args.update({"event_format_type": "MetricReport"})
+
+ json_data = {
+ "Destination": "https://192.168.1.100:8188",
+ "EventFormatType": "MetricReport",
+ "Context": "RedfishEvent",
+ "Protocol": "Redfish",
+ "EventTypes": ["MetricReport"],
+ "SubscriptionType": "RedfishEvent"
+ }
+ mocker.patch(MODULE_PATH + 'redfish_event_subscription.get_subscription', return_value=None)
+ create_subscription_response_mock = redfish_response_mock
+ create_subscription_response_mock.json_data = json_data
+ mocker.patch(MODULE_PATH + 'redfish_event_subscription.create_subscription',
+ return_value=create_subscription_response_mock)
+ f_module = self.get_module_mock()
+ redfish_response_mock.success = True
+ with pytest.raises(Exception) as err:
+ self._run_module(redfish_default_args)
+ print(err)
+ assert err.value.args[0]['msg'] == EVENT_TYPE_INVALID
+
+ def test_module_present_does_exist(self, mocker, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args):
+ redfish_default_args.update({"state": "present"})
+ redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"event_type": "MetricReport"})
+ redfish_default_args.update({"event_format_type": "MetricReport"})
+
+ json_data = {
+ "Id": "c6ff37fc-8204-11eb-b08f-2cea7ff7fe80",
+ "Destination": "https://192.168.1.100:8188",
+ "EventFormatType": "MetricReport",
+ "Context": "RedfishEvent",
+ "Protocol": "Redfish",
+ "EventTypes": ["MetricReport"],
+ "SubscriptionType": "RedfishEvent"
+ }
+ mocker.patch(MODULE_PATH + 'redfish_event_subscription.get_subscription', return_value=json_data)
+ redfish_response_mock.success = True
+ result = self._run_module(redfish_default_args)
+ assert result["msg"] == SUBSCRIPTION_EXISTS
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_firmware.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_firmware.py
new file mode 100644
index 00000000..dac24df4
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_firmware.py
@@ -0,0 +1,272 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.5.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+import sys
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import redfish_firmware
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from mock import MagicMock
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+from mock import patch, mock_open
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+JOB_URI = "/redfish/v1/JobService/Jobs/{job_id}"
+
+
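+# Connection fixture: replaces redfish_firmware.Redfish with a MagicMock so that
+# invoke_request on the mocked connection returns the shared redfish_response_mock.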
+@pytest.fixture
+def redfish_firmware_connection_mock(mocker, redfish_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'redfish_firmware.Redfish')
+ redfish_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ redfish_connection_mock_obj.invoke_request.return_value = redfish_response_mock
+ return redfish_connection_mock_obj
+
+
+class TestRedfishFirmware(FakeAnsibleModule):
+ module = redfish_firmware
+
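+    # Best-effort fixture: it attempts to patch the module's payload file handle and
+    # falls back to a plain MagicMock when the patch raises AttributeError; the
+    # mock's read() simply returns another MagicMock.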
+ @pytest.fixture
+ def os_mock(self, mocker):
+ try:
+ fi_mock = mocker.patch(
+ MODULE_PATH + 'redfish_firmware.payload_file.get("file")')
+ except AttributeError:
+ fi_mock = MagicMock()
+ obj = MagicMock()
+ fi_mock.read.return_value = obj
+ return fi_mock
+
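+    # Sample payload of a completed RepositoryUpdate job (with nested job details),
+    # kept as class-level test data.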
+ update_status = {
+ "@odata.context": "/redfish/v1/$metadata#DellJob.DellJob",
+ "@odata.id": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_824742691385",
+ "@odata.type": "#DellJob.v1_0_2.DellJob",
+ "CompletionTime": "2020-02-23T21:51:30",
+ "Description": "Job Instance",
+ "EndTime": None,
+ "Id": "JID_824742691385",
+ "JobState": "Completed",
+ "JobType": "RepositoryUpdate",
+ "Message": "Job completed successfully.",
+ "MessageArgs": [
+ "NA"
+ ],
+ "MessageArgs@odata.count": 1,
+ "MessageId": "RED001",
+ "Name": "Repository Update",
+ "PercentComplete": 100,
+ "StartTime": "TIME_NOW",
+ "Status": "Success",
+ "TargetSettingsURI": None,
+ "job_details": {
+ "Data": {
+ "StatusCode": 200,
+ "body": {
+ "@Message.ExtendedInfo": [
+ {
+ "Message": "Successfully Completed Request",
+ "MessageArgs": [],
+ "MessageArgs@odata.count": 0,
+ "MessageId": "Base.1.5.Success",
+ "RelatedProperties": [],
+ "RelatedProperties@odata.count": 0,
+ "Resolution": "None",
+ "Severity": "OK"
+ }
+ ],
+ "PackageList": [
+ {
+ "BaseLocation": None,
+ "ComponentID": "18981",
+ "ComponentType": "APAC",
+ "Criticality": "3",
+ "DisplayName": "Dell OS Driver Pack",
+ "JobID": "JID_824746139010",
+ "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
+ "PackageVersion": "19.10.12",
+ "RebootType": "NONE",
+ "Target": "DCIM:INSTALLED#802__DriverPack.Embedded.1:LC.Embedded.1"
+ }]
+
+ }
+ }
+ }
+ }
+
+ def test_main_redfish_firmware_success_case(self, redfish_firmware_connection_mock, redfish_default_args, mocker,
+ redfish_response_mock):
+ redfish_default_args.update({"image_uri": "/home/firmware_repo/component.exe"})
+ redfish_firmware_connection_mock.headers.get("Location").return_value = "https://multipart/form-data"
+ redfish_firmware_connection_mock.headers.get("Location").split().return_value = "multipart/form-data"
+ mocker.patch(MODULE_PATH + 'redfish_firmware.firmware_update',
+ return_value=redfish_response_mock)
+ redfish_response_mock.json_data = {"image_uri": "http://home/firmware_repo/component.exe"}
+ redfish_response_mock.status_code = 201
+ redfish_response_mock.success = True
+ result = self._run_module(redfish_default_args)
+ assert result == {'changed': True,
+ 'msg': 'Successfully submitted the firmware update task.',
+ 'task': {'id': redfish_response_mock.headers.get().split().__getitem__(),
+ 'uri': JOB_URI.format(job_id=redfish_response_mock.headers.get().split().__getitem__())}}
+
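+    # Each exception type below is raised from the mocked firmware_update call; the
+    # module must fail cleanly, returning 'msg' (and 'error_info' for HTTPError) but
+    # never a 'task' entry.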
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_main_redfish_firmware_exception_handling_case(self, exc_type, mocker, redfish_default_args,
+ redfish_firmware_connection_mock,
+ redfish_response_mock):
+ redfish_default_args.update({"image_uri": "/home/firmware_repo/component.exe"})
+ redfish_response_mock.json_data = {"value": [{"image_uri": "/home/firmware_repo/component.exe"}]}
+ redfish_response_mock.status_code = 400
+ redfish_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'redfish_firmware.firmware_update',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH + 'redfish_firmware.firmware_update',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(redfish_default_args)
+ assert 'task' not in result
+ assert 'msg' in result
+ assert result['failed'] is True
+ if exc_type == HTTPError:
+ assert 'error_info' in result
+
+ def test_get_update_service_target_success_case(self, redfish_default_args, redfish_firmware_connection_mock,
+ redfish_response_mock):
+ redfish_default_args.update({"transfer_protocol": "HTTP"})
+ f_module = self.get_module_mock(params=redfish_default_args)
+ redfish_response_mock.status_code = 200
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {
+ "Actions": {
+ "#UpdateService.SimpleUpdate": {
+ "TransferProtocol@Redfish.AllowableValues": ["HTTP"],
+ "target": ""
+ }
+ },
+ "transfer_protocol": "HTTP",
+ "HttpPushUri": "http://dell.com",
+ "FirmwareInventory": {
+ "@odata.id": "2134"
+ }
+ }
+ result = self.module._get_update_service_target(redfish_firmware_connection_mock, f_module)
+ assert result == ('2134', 'http://dell.com', '')
+
+ def test_get_update_service_target_uri_none_case(self, redfish_default_args, redfish_firmware_connection_mock,
+ redfish_response_mock):
+ redfish_default_args.update({"transfer_protocol": "HTTP"})
+ f_module = self.get_module_mock(params=redfish_default_args)
+ redfish_response_mock.status_code = 200
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {
+ "Actions": {
+ "#UpdateService.SimpleUpdate": {
+ "TransferProtocol@Redfish.AllowableValues": ["HTTP"],
+ "target": None
+ }
+ },
+ "transfer_protocol": "HTTP",
+ "HttpPushUri": None,
+ "FirmwareInventory": {
+ "@odata.id": None
+ }
+ }
+ with pytest.raises(Exception) as ex:
+ self.module._get_update_service_target(redfish_firmware_connection_mock, f_module)
+ assert ex.value.args[0] == "Target firmware version does not support redfish firmware update."
+
+ def test_get_update_service_target_failed_case(self, redfish_default_args, redfish_firmware_connection_mock,
+ redfish_response_mock):
+ redfish_default_args.update({"transfer_protocol": "HTTP"})
+ f_module = self.get_module_mock(params=redfish_default_args)
+ redfish_response_mock.status_code = 200
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {
+ "Actions": {
+ "#UpdateService.SimpleUpdate": {
+ "TransferProtocol@Redfish.AllowableValues": [""]
+ }
+ },
+ "transfer_protocol": "HTTP",
+ "HttpPushUri": "http://dell.com",
+ "FirmwareInventory": {
+ "@odata.id": "2134"
+ }
+ }
+ with pytest.raises(Exception) as ex:
+ self.module._get_update_service_target(redfish_firmware_connection_mock, f_module)
+ assert ex.value.args[0] == "Target firmware version does not support {0} protocol.".format("HTTP")
+
+ def test_firmware_update_success_case01(self, redfish_default_args, redfish_firmware_connection_mock,
+ redfish_response_mock, mocker):
+ mocker.patch(MODULE_PATH + 'redfish_firmware._get_update_service_target',
+ return_value=('2134', 'http://dell.com', 'redfish'))
+ redfish_default_args.update({"image_uri": "http://home/firmware_repo/component.exe",
+ "transfer_protocol": "HTTP"})
+ f_module = self.get_module_mock(params=redfish_default_args)
+ redfish_response_mock.status_code = 200
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"image_uri": "http://home/firmware_repo/component.exe",
+ "transfer_protocol": "HTTP"}
+ result = self.module.firmware_update(redfish_firmware_connection_mock, f_module)
+ assert result == redfish_response_mock
+
+ def test_firmware_update_success_case02(self, redfish_default_args, redfish_firmware_connection_mock,
+ redfish_response_mock, mocker):
+ mocker.patch(MODULE_PATH + "redfish_firmware._get_update_service_target",
+ return_value=('2134', 'nhttp://dell.com', 'multipart/form-data'))
+ mocker.patch("ansible_collections.dellemc.openmanage.plugins.modules.redfish_firmware._encode_form_data",
+ return_value=({"file": (3, "nhttp://dell.com", "multipart/form-data")}, "multipart/form-data"))
+ redfish_default_args.update({"image_uri": "nhttp://home/firmware_repo/component.exe",
+ "transfer_protocol": "HTTP"})
+ f_module = self.get_module_mock(params=redfish_default_args)
+ redfish_response_mock.status_code = 200
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"image_uri": "nhttp://home/firmware_repo/component.exe",
+ "transfer_protocol": "HTTP"}
+ if sys.version_info.major == 3:
+ builtin_module_name = 'builtins'
+ else:
+ builtin_module_name = '__builtin__'
+ with patch("{0}.open".format(builtin_module_name), mock_open(read_data="data")) as mock_file:
+ result = self.module.firmware_update(redfish_firmware_connection_mock, f_module)
+ assert result == redfish_response_mock
+
+ def test_firmware_update_success_case03(self, redfish_default_args, redfish_firmware_connection_mock,
+ redfish_response_mock, mocker):
+ mocker.patch(MODULE_PATH + "redfish_firmware._get_update_service_target",
+ return_value=('2134', 'nhttp://dell.com', 'multipart/form-data'))
+ mocker.patch(MODULE_PATH + "redfish_firmware._encode_form_data",
+ return_value=({"file": (3, "nhttp://dell.com", "multipart/form-data")}, "multipart/form-data"))
+ redfish_default_args.update({"image_uri": "nhttp://home/firmware_repo/component.exe",
+ "transfer_protocol": "HTTP"})
+ f_module = self.get_module_mock(params=redfish_default_args)
+ redfish_response_mock.status_code = 201
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"image_uri": "nhttp://home/firmware_repo/component.exe",
+ "transfer_protocol": "HTTP"}
+ if sys.version_info.major == 3:
+ builtin_module_name = 'builtins'
+ else:
+ builtin_module_name = '__builtin__'
+ with patch("{0}.open".format(builtin_module_name), mock_open(read_data="data")) as mock_file:
+ result = self.module.firmware_update(redfish_firmware_connection_mock, f_module)
+ assert result == redfish_response_mock
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_powerstate.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_powerstate.py
new file mode 100644
index 00000000..1477015a
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_powerstate.py
@@ -0,0 +1,475 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 2.1.3
+# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import redfish_powerstate
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+tarrget_error_msg = "The target device does not support the system reset" \
+ " feature using Redfish API."
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
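+# Connection fixture for the powerstate tests: the Redfish context manager is
+# patched so the module works against a MagicMock connection object.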
+@pytest.fixture
+def redfish_connection_mock_for_powerstate(mocker, redfish_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'redfish_powerstate.Redfish')
+ redfish_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ redfish_connection_mock_obj.invoke_request.return_value = redfish_response_mock
+ return redfish_connection_mock_obj
+
+
+class TestRedfishPowerstate(FakeAnsibleModule):
+ module = redfish_powerstate
+
+ def test_fetch_powerstate_resource_success_case_01(self, redfish_connection_mock_for_powerstate,
+ redfish_response_mock):
+ """dynamically fetch the computer system id if one member exists in system"""
+ f_module = self.get_module_mock()
+ redfish_response_mock.json_data = {
+ "Systems": {
+ "@odata.id": "/redfish/v1/Systems"
+ },
+ "Members": [
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1"
+ }
+ ],
+ "Actions": {
+ "#ComputerSystem.Reset": {
+ "target": "/redfish/v1/Systems/System.Embedded.1/Actions/ComputerSystem.Reset",
+ "ResetType@Redfish.AllowableValues": [
+ "On",
+ "ForceOff",
+ "ForceRestart",
+ "GracefulShutdown",
+ "PushPowerButton",
+ "Nmi",
+ "PowerCycle"
+ ]
+ }
+ },
+ "PowerState": "On"
+ }
+ redfish_connection_mock_for_powerstate.root_uri = "/redfish/v1/"
+ self.module.fetch_power_uri_resource(f_module, redfish_connection_mock_for_powerstate)
+ assert self.module.powerstate_map["allowable_enums"] == [
+ "On",
+ "ForceOff",
+ "ForceRestart",
+ "GracefulShutdown",
+ "PushPowerButton",
+ "Nmi",
+ "PowerCycle"
+ ]
+ assert self.module.powerstate_map['power_uri'] == '/redfish/v1/Systems/System.Embedded.1/Actions' \
+ '/ComputerSystem.Reset'
+ assert self.module.powerstate_map['current_state'] == 'On'
+
+ def test_fetch_powerstate_resource_resource_id_given_success_case(self,
+ redfish_connection_mock_for_powerstate,
+ redfish_response_mock):
+ """case when system id is explicitly provided"""
+ f_module = self.get_module_mock(params={"resource_id": "System.Embedded.2"})
+ redfish_response_mock.json_data = {
+ "Systems": {
+ "@odata.id": "/redfish/v1/Systems"
+ },
+ "Members": [
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.2"
+ }
+ ],
+ "Actions": {
+ "#ComputerSystem.Reset": {
+ "target": "/redfish/v1/Systems/System.Embedded.2/Actions/ComputerSystem.Reset",
+ "ResetType@Redfish.AllowableValues": [
+ "On",
+ "ForceOff",
+ "ForceRestart",
+ "GracefulShutdown",
+ "PushPowerButton",
+ "Nmi",
+ "PowerCycle"
+ ]
+ }
+ },
+ "PowerState": "On"
+ }
+ redfish_connection_mock_for_powerstate.root_uri = "/redfish/v1/"
+ self.module.fetch_power_uri_resource(f_module, redfish_connection_mock_for_powerstate)
+ assert self.module.powerstate_map["allowable_enums"] == [
+ "On",
+ "ForceOff",
+ "ForceRestart",
+ "GracefulShutdown",
+ "PushPowerButton",
+ "Nmi",
+ "PowerCycle"
+ ]
+ assert self.module.powerstate_map['power_uri'] == '/redfish/v1/Systems/System.Embedded.2/Actions' \
+ '/ComputerSystem.Reset'
+ assert self.module.powerstate_map['current_state'] == 'On'
+
+ def test_fetch_powerstate_resource_resource_id_not_given_failure_case(self,
+ redfish_connection_mock_for_powerstate,
+ redfish_response_mock):
+ """case when system id not provided but multipble resource exists"""
+ f_module = self.get_module_mock()
+ redfish_response_mock.json_data = {
+ "Systems": {
+ "@odata.id": "/redfish/v1/Systems"
+ },
+ "Members": [
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.2"
+ }
+ ],
+ "Actions": {
+ "#ComputerSystem.Reset": {
+ "target": "/redfish/v1/Systems/System.Embedded.2/Actions/ComputerSystem.Reset",
+ "ResetType@Redfish.AllowableValues": [
+ "On",
+ "ForceOff",
+ "ForceRestart",
+ "GracefulShutdown",
+ "PushPowerButton",
+ "Nmi",
+ "PowerCycle"
+ ]
+ }
+ },
+ "PowerState": "On"
+ }
+ redfish_connection_mock_for_powerstate.root_uri = "/redfish/v1/"
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_power_uri_resource(f_module, redfish_connection_mock_for_powerstate)
+ assert exc.value.args[0] == "Multiple devices exists in the system, but option 'resource_id' is not specified."
+
+ def test_fetch_powerstate_resource_resource_id_invalid_failure_case(self,
+ redfish_connection_mock_for_powerstate,
+ redfish_response_mock):
+ """failure case when system id is explicitly provided but which is not valid"""
+ f_module = self.get_module_mock(params={"resource_id": "System.Embedded.3"})
+ redfish_response_mock.json_data = {
+ "Systems": {
+ "@odata.id": "/redfish/v1/Systems"
+ },
+ "Members":
+ [
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.2"
+ }
+ ],
+ "Actions": {
+ "#ComputerSystem.Reset": {
+ "target": "/redfish/v1/Systems/System.Embedded.2/Actions/ComputerSystem.Reset",
+ "ResetType@Redfish.AllowableValues": [
+ "On",
+ "ForceOff",
+ "ForceRestart",
+ "GracefulShutdown",
+ "PushPowerButton",
+ "Nmi",
+ "PowerCycle"
+ ]
+ }
+ },
+ "PowerState": "On"
+ }
+ redfish_connection_mock_for_powerstate.root_uri = "/redfish/v1/"
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_power_uri_resource(f_module, redfish_connection_mock_for_powerstate)
+ assert exc.value.args[0] == "Invalid device Id 'System.Embedded.3' is provided"
+
+ def test_fetch_powerstate_resource_error_case_01(self, redfish_connection_mock_for_powerstate,
+ redfish_response_mock):
+ """failure case when system does not supports redfish computer system in schema"""
+ f_module = self.get_module_mock()
+ redfish_response_mock.json_data = {
+ "@odata.id": "/redfish/v1/Systems",
+ "Members": [
+ ],
+ }
+
+ redfish_connection_mock_for_powerstate.root_uri = "/redfish/v1/"
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_power_uri_resource(f_module, redfish_connection_mock_for_powerstate)
+        assert exc.value.args[0] == target_error_msg
+
+ def test_fetch_powerstate_resource_error_case_02(self, redfish_connection_mock_for_powerstate,
+ redfish_response_mock):
+ """failuere case when system does not supports redfish computer system action in schema"""
+ f_module = self.get_module_mock()
+ redfish_response_mock.json_data = {
+ "Systems": {
+ "@odata.id": "/redfish/v1/Systems"
+ },
+ "Members": [
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1"
+ }
+ ],
+ "Actions": {
+
+ }}
+ redfish_connection_mock_for_powerstate.root_uri = "/redfish/v1/"
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_power_uri_resource(f_module, redfish_connection_mock_for_powerstate)
+        assert exc.value.args[0] == target_error_msg
+
+ def test_fetch_powerstate_resource_error_case_03(self, redfish_connection_mock_for_powerstate,
+ redfish_response_mock):
+ """failuere case when system does not supports and throws http error not found"""
+ f_module = self.get_module_mock()
+ redfish_connection_mock_for_powerstate.root_uri = "/redfish/v1/"
+        redfish_connection_mock_for_powerstate.invoke_request.side_effect = HTTPError(
+            'http://testhost.com', 404, json.dumps(target_error_msg), {}, None)
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_power_uri_resource(f_module, redfish_connection_mock_for_powerstate)
+
+ def test_fetch_powerstate_resource_error_case_04(self, redfish_connection_mock_for_powerstate,
+ redfish_response_mock):
+ """failure case when system does not supports and throws http error 400 bad request"""
+ f_module = self.get_module_mock()
+ redfish_connection_mock_for_powerstate.root_uri = "/redfish/v1/"
+        redfish_connection_mock_for_powerstate.invoke_request.side_effect = HTTPError(
+            'http://testhost.com', 400, target_error_msg, {}, None)
+        with pytest.raises(Exception, match=target_error_msg) as exc:
+ self.module.fetch_power_uri_resource(f_module, redfish_connection_mock_for_powerstate)
+
+ def test_fetch_powerstate_resource_error_case_05(self, redfish_connection_mock_for_powerstate,
+ redfish_response_mock):
+ f_module = self.get_module_mock()
+ msg = "connection error"
+ redfish_connection_mock_for_powerstate.root_uri = "/redfish/v1/"
+ redfish_connection_mock_for_powerstate.invoke_request.side_effect = URLError(msg)
+ with pytest.raises(Exception, match=msg) as exc:
+ self.module.fetch_power_uri_resource(f_module, redfish_connection_mock_for_powerstate)
+
+ def test_fetch_powerstate_resource_error_case_06(self, redfish_connection_mock_for_powerstate,
+ redfish_response_mock):
+ """when both system id and mebers of id not provided"""
+ f_module = self.get_module_mock()
+ redfish_response_mock.json_data = {
+ "Systems": {
+ "@odata.id": "/redfish/v1/Systems"
+ },
+ "Members": [
+ ],
+ "Actions": {
+
+ }}
+ redfish_connection_mock_for_powerstate.root_uri = "/redfish/v1/"
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_power_uri_resource(f_module, redfish_connection_mock_for_powerstate)
+        assert exc.value.args[0] == target_error_msg
+
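+    # Truth table for is_change_applicable_for_power_state: each entry maps the
+    # requested reset type ('apply') and the current power state ('current') to the
+    # expected change decision ('result').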
+ power_vals = [{"apply": "On", "current": "On", "result": False},
+ {"apply": "On", "current": "PoweringOn", "result": False},
+ {"apply": "On", "current": "Off", "result": True},
+ {"apply": "On", "current": "PoweringOff", "result": True},
+ {"apply": "ForceOn", "current": "On", "result": False},
+ {"apply": "ForceOn", "current": "PoweringOn", "result": False},
+ {"apply": "ForceOn", "current": "Off", "result": True},
+ {"apply": "ForceOn", "current": "PoweringOff", "result": True},
+ {"apply": "PushPowerButton", "current": "On", "result": True},
+ {"apply": "PushPowerButton", "current": "PoweringOn", "result": True},
+ {"apply": "PushPowerButton", "current": "Off", "result": True},
+ {"apply": "PushPowerButton", "current": "PoweringOff", "result": True},
+ {"apply": "ForceOff", "current": "On", "result": True},
+ {"apply": "ForceOff", "current": "PoweringOn", "result": True},
+ {"apply": "ForceOff", "current": "Off", "result": False},
+ {"apply": "ForceOff", "current": "PoweringOff", "result": False},
+ {"apply": "ForceRestart", "current": "On", "result": True},
+ {"apply": "ForceRestart", "current": "PoweringOn", "result": True},
+ {"apply": "ForceRestart", "current": "Off", "result": False},
+ {"apply": "ForceRestart", "current": "PoweringOff", "result": False},
+ {"apply": "GracefulRestart", "current": "On", "result": True},
+ {"apply": "GracefulRestart", "current": "PoweringOn", "result": True},
+ {"apply": "GracefulRestart", "current": "Off", "result": False},
+ {"apply": "GracefulRestart", "current": "PoweringOff", "result": False},
+ {"apply": "GracefulShutdown", "current": "On", "result": True},
+ {"apply": "GracefulShutdown", "current": "PoweringOn", "result": True},
+ {"apply": "GracefulShutdown", "current": "Off", "result": False},
+ {"apply": "GracefulShutdown", "current": "PoweringOff", "result": False},
+ {"apply": "Nmi", "current": "On", "result": True},
+ {"apply": "Nmi", "current": "PoweringOn", "result": True},
+ {"apply": "Nmi", "current": "Off", "result": False},
+ {"apply": "Nmi", "current": "PoweringOff", "result": False},
+ {"apply": "PowerCycle", "current": "On", "result": True},
+ {"apply": "PowerCycle", "current": "PoweringOn", "result": True},
+ {"apply": "PowerCycle", "current": "Off", "result": False},
+ {"apply": "PowerCycle", "current": "PoweringOff", "result": False},
+
+ ]
+
+ @pytest.mark.parametrize("power_map", power_vals)
+ def test_is_change_applicable_for_power_state(self, power_map):
+ apply_state = power_map["apply"]
+ current_state = power_map["current"]
+ result = power_map["result"]
+ res = self.module.is_change_applicable_for_power_state(current_state, apply_state)
+ assert res is result
+
+ def test_is_change_applicable_for_power_state_case_02(self):
+ apply_state = "xyz"
+ current_state = "On"
+ result = False
+ res = self.module.is_change_applicable_for_power_state(current_state, apply_state)
+ assert res is result
+
+ def test_is_valid_reset_type(self):
+ f_module = self.get_module_mock()
+ reset_type = "GracefulRestart"
+ allowable_enum = [
+ "On",
+ "ForceOff",
+ "ForceRestart",
+ "GracefulShutdown",
+ "PushPowerButton",
+ "Nmi",
+ "PowerCycle"
+ ]
+ error_msg = "The target device does not support a" \
+ " graceful restart operation.The acceptable values for device reset types" \
+ " are {0}.".format(", ".join(allowable_enum))
+ with pytest.raises(Exception) as exc:
+ self.module.is_valid_reset_type(reset_type, allowable_enum, f_module)
+ assert exc.value.args[0] == error_msg
+
+ def test_is_valid_reset_type_case2(self):
+ f_module = self.get_module_mock()
+ reset_type = "ForceOff"
+ allowable_enum = [
+ "On",
+ "ForceOff",
+ "ForceRestart",
+ "GracefulShutdown",
+ "PushPowerButton",
+ "Nmi",
+ "PowerCycle"
+ ]
+ self.module.is_valid_reset_type(reset_type, allowable_enum, f_module)
+
+ @pytest.mark.parametrize("val", [{"change_applicable": True, "check_mode_msg": "Changes found to be applied."},
+ {"change_applicable": False, "check_mode_msg": "No Changes found to be applied."}])
+ def test_run_change_power_state_case_with_checkmode(self, mocker, val):
+ change_applicable = val["change_applicable"]
+ message = val["check_mode_msg"]
+ f_module = self.get_module_mock(params={"reset_type": "On"}, check_mode=True)
+ self.module.powerstate_map.update({"allowable_enums": [
+ "On",
+ "ForceOff",
+ "ForceRestart",
+ "GracefulShutdown",
+ "PushPowerButton",
+ "Nmi",
+ "PowerCycle"
+ ]})
+ self.module.powerstate_map.update({'power_uri': '/redfish/v1/Systems/System.Embedded.1/Actions/ComputerSystem'
+ '.Reset'})
+ self.module.powerstate_map.update({'current_state': 'On'})
+
+ mocker.patch(MODULE_PATH + 'redfish_powerstate.fetch_power_uri_resource',
+ return_value=None)
+ mocker.patch(MODULE_PATH + 'redfish_powerstate.is_valid_reset_type',
+ return_value=None)
+ mocker.patch(MODULE_PATH + 'redfish_powerstate.is_change_applicable_for_power_state',
+ return_value=change_applicable)
+
+ with pytest.raises(Exception, match=message):
+ self.module.run_change_power_state(redfish_connection_mock_for_powerstate, f_module)
+
+ @pytest.mark.parametrize("val", [{"change_applicable": True, "status_code": 204},
+ {"change_applicable": False, "status_code": 200},
+ {"change_applicable": True, "status_code": 200}])
+ def test_run_change_power_state_case_without_checkmode(self, mocker, val, redfish_connection_mock_for_powerstate,
+ redfish_response_mock):
+ redfish_response_mock.status_code = val["status_code"]
+ change_applicable = val["change_applicable"]
+ f_module = self.get_module_mock(params={"reset_type": "On"})
+ self.module.powerstate_map.update({"allowable_enums": [
+ "On",
+ "ForceOff",
+ "ForceRestart",
+ "GracefulShutdown",
+ "PushPowerButton",
+ "Nmi",
+ "PowerCycle"
+ ]})
+ self.module.powerstate_map.update({'power_uri': '/redfish/v1/Systems/System.Embedded.1/Actions/ComputerSystem'
+ '.Reset'})
+ self.module.powerstate_map.update({'current_state': 'On'})
+ if change_applicable is True:
+ if val["status_code"] == 204:
+ redfish_response_mock.success = True
+ message = "Successfully performed the reset type operation 'On'."
+ else:
+ redfish_response_mock.success = False
+ message = "Unable to perform the reset type operation 'On'."
+ else:
+ message = "The device is already powered on."
+ mocker.patch(MODULE_PATH + 'redfish_powerstate.fetch_power_uri_resource',
+ return_value=None)
+ mocker.patch(MODULE_PATH + 'redfish_powerstate.is_valid_reset_type',
+ return_value=None)
+ mocker.patch(MODULE_PATH + 'redfish_powerstate.is_change_applicable_for_power_state',
+ return_value=change_applicable)
+
+ with pytest.raises(Exception, match=message):
+ self.module.run_change_power_state(redfish_connection_mock_for_powerstate, f_module)
+
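+    # URLError should be reported as 'unreachable'; every other exception type is
+    # expected to surface as a failed result through fail_json.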
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError,
+ Exception])
+ def test_main_redfish_powerstate_exception_handling_case(self, exc_type, redfish_default_args,
+ redfish_connection_mock_for_powerstate,
+ redfish_response_mock, mocker):
+ redfish_default_args.update({"reset_type": "On"})
+ redfish_response_mock.status_code = 400
+ redfish_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'redfish_powerstate.run_change_power_state',
+ side_effect=exc_type("url open error"))
+ result = self._run_module(redfish_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'redfish_powerstate.run_change_power_state',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(redfish_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'redfish_powerstate.run_change_power_state',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(redfish_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py
new file mode 100644
index 00000000..55fb3535
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py
@@ -0,0 +1,610 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell EMC OpenManage Ansible Modules
+# Version 5.3.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import redfish_storage_volume
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
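+# Connection fixture for the storage volume tests, following the same pattern as
+# the other Redfish module test fixtures in this directory.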
+@pytest.fixture
+def redfish_connection_mock_for_storage_volume(mocker, redfish_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'redfish_storage_volume.Redfish')
+ redfish_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ redfish_connection_mock_obj.invoke_request.return_value = redfish_response_mock
+ return redfish_connection_mock_obj
+
+
+class TestStorageVolume(FakeAnsibleModule):
+ module = redfish_storage_volume
+
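+    # Seeds the module-level storage_collection_map with the storage base URI that
+    # the URI-building helpers under test rely on.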
+ @pytest.fixture
+ def storage_volume_base_uri(self):
+ self.module.storage_collection_map.update({"storage_base_uri": "/redfish/v1/Systems/System.Embedded.1/Storage"})
+
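+    # Parameter sets that should all take main() down the success path: present,
+    # absent and initialize requests, including a fully specified volume creation.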
+ arg_list1 = [{"state": "present"}, {"state": "present", "volume_id": "volume_id"},
+ {"state": "absent", "volume_id": "volume_id"},
+ {"command": "initialize", "volume_id": "volume_id"},
+ {"state": "present", "volume_type": "NonRedundant",
+ "name": "name", "controller_id": "controller_id",
+ "drives": ["drive1"],
+ "block_size_bytes": 123,
+ "capacity_bytes": "1234567",
+ "optimum_io_size_bytes": "1024",
+ "encryption_types": "NativeDriveEncryption",
+ "encrypted": False,
+ "volume_id": "volume_id", "oem": {"Dell": "DellAttributes"},
+ "initialize_type": "Slow"
+ }]
+
+ @pytest.mark.parametrize("param", arg_list1)
+ def test_redfish_storage_volume_main_success_case_01(self, mocker, redfish_default_args, module_mock,
+ redfish_connection_mock_for_storage_volume, param):
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.validate_inputs')
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.fetch_storage_resource')
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.configure_raid_operation',
+ return_value={"msg": "Successfully submitted volume task.",
+ "task_uri": "task_uri",
+ "task_id": 1234})
+ redfish_default_args.update(param)
+ result = self._run_module(redfish_default_args)
+ assert result["changed"] is True
+ assert result['msg'] == "Successfully submitted volume task."
+ assert result["task"]["id"] == 1234
+ assert result["task"]["uri"] == "task_uri"
+
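+    # Parameter sets with missing required options that should cause main() to fail.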
+ arg_list2 = [
+ {"state": "absent"},
+ {"command": "initialize"}, {}]
+
+ @pytest.mark.parametrize("param", arg_list2)
+ def test_redfish_storage_volume_main_failure_case_01(self, param, redfish_default_args, module_mock):
+ """required parameter is not passed along with specified report_type"""
+ redfish_default_args.update(param)
+ result = self._run_module_with_fail_json(redfish_default_args)
+ assert 'msg' in result
+ assert "task" not in result
+ assert result['failed'] is True
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_redfish_storage_volume_main_exception_handling_case(self, exc_type, mocker, redfish_default_args,
+ redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
+ redfish_default_args.update({"state": "present"})
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.validate_inputs')
+ redfish_response_mock.status_code = 400
+ redfish_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.configure_raid_operation',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.configure_raid_operation',
+ side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(redfish_default_args)
+ assert 'task' not in result
+ assert 'msg' in result
+ assert result['failed'] is True
+ if exc_type == HTTPError:
+ assert 'error_info' in result
+
+ msg1 = "Either state or command should be provided to further actions."
+ msg2 = "When state is present, either controller_id or volume_id must be specified to perform further actions."
+
+ @pytest.mark.parametrize("input",
+ [{"param": {"xyz": 123}, "msg": msg1}, {"param": {"state": "present"}, "msg": msg2}])
+ def test_validate_inputs_error_case_01(self, input):
+ f_module = self.get_module_mock(params=input["param"])
+ with pytest.raises(Exception) as exc:
+ self.module.validate_inputs(f_module)
+ assert exc.value.args[0] == input["msg"]
+
+ def test_get_success_message_case_01(self):
+ action = "create"
+ message = self.module.get_success_message(action, "JobService/Jobs/JID_1234")
+ assert message["msg"] == "Successfully submitted {0} volume task.".format(action)
+ assert message["task_uri"] == "JobService/Jobs/JID_1234"
+ assert message["task_id"] == "JID_1234"
+
+ def test_get_success_message_case_02(self):
+ action = "create"
+ message = self.module.get_success_message(action, None)
+ assert message["msg"] == "Successfully submitted {0} volume task.".format(action)
+
+ @pytest.mark.parametrize("input", [{"state": "present"}, {"state": "absent"}, {"command": "initialize"}])
+ def test_configure_raid_operation(self, input, redfish_connection_mock_for_storage_volume, mocker):
+ f_module = self.get_module_mock(params=input)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_volume_create_modify',
+ return_value={"msg": "Successfully submitted create volume task.",
+ "task_uri": "JobService/Jobs",
+ "task_id": "JID_123"})
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_volume_deletion',
+ return_value={"msg": "Successfully submitted delete volume task.",
+ "task_uri": "JobService/Jobs",
+ "task_id": "JID_456"})
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_volume_initialization',
+ return_value={"msg": "Successfully submitted initialize volume task.",
+ "task_uri": "JobService/Jobs",
+ "task_id": "JID_789"})
+ message = self.module.configure_raid_operation(f_module, redfish_connection_mock_for_storage_volume)
+ val = list(input.values())
+ if val[0] == "present":
+ assert message["msg"] == "Successfully submitted create volume task."
+ assert message["task_id"] == "JID_123"
+ if val[0] == "absent":
+ assert message["msg"] == "Successfully submitted delete volume task."
+ assert message["task_id"] == "JID_456"
+ if val[0] == "initialize":
+ assert message["msg"] == "Successfully submitted initialize volume task."
+ assert message["task_id"] == "JID_789"
+
+ def test_perform_volume_initialization_success_case_01(self, mocker, redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri):
+ message = {"msg": "Successfully submitted initialize volume task.", "task_uri": "JobService/Jobs",
+ "task_id": "JID_789"}
+ f_module = self.get_module_mock(params={"initialize_type": "Fast", "volume_id": "volume_id"})
+ obj1 = mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_initialization_progress', return_value=[])
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_storage_volume_action', return_value=message)
+ message = self.module.perform_volume_initialization(f_module, redfish_connection_mock_for_storage_volume)
+ assert message["msg"] == "Successfully submitted initialize volume task."
+ assert message["task_id"] == "JID_789"
+
+ @pytest.mark.parametrize("operations", [[{"OperationName": "initialize", "PercentageComplete": 70}],
+ [{"OperationName": "initialize"}]])
+ def test_perform_volume_initialization_failure_case_01(self, mocker, operations,
+ redfish_connection_mock_for_storage_volume):
+ f_module = self.get_module_mock(params={"volume_id": "volume_id"})
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_initialization_progress', return_value=operations)
+ percentage_complete = operations[0].get("PercentageComplete")
+ with pytest.raises(Exception) as exc:
+ self.module.perform_volume_initialization(f_module, redfish_connection_mock_for_storage_volume)
+ if percentage_complete:
+ assert exc.value.args[0] == "Cannot perform the configuration operation because the configuration" \
+ " job 'initialize' in progress is at '70' percentage."
+ else:
+ assert exc.value.args[0] == "Cannot perform the configuration operations because a" \
+ " configuration job for the device already exists."
+
+ def test_perform_volume_initialization_failure_case_02(self, mocker, redfish_connection_mock_for_storage_volume):
+ f_module = self.get_module_mock(params={})
+ with pytest.raises(Exception) as exc:
+ self.module.perform_volume_initialization(f_module, redfish_connection_mock_for_storage_volume)
+ assert exc.value.args[0] == "'volume_id' option is a required property for initializing a volume."
+
+ def test_perform_volume_deletion_success_case_01(self, mocker, redfish_connection_mock_for_storage_volume,
+ redfish_response_mock, storage_volume_base_uri):
+ redfish_response_mock.success = True
+ f_module = self.get_module_mock(params={"volume_id": "volume_id"})
+ message = {"msg": "Successfully submitted delete volume task.", "task_uri": "JobService/Jobs",
+ "task_id": "JID_456"}
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_volume_id_exists', return_value=redfish_response_mock)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_storage_volume_action',
+ return_value=redfish_response_mock)
+ self.module.perform_volume_deletion(f_module, redfish_connection_mock_for_storage_volume)
+ assert message["msg"] == "Successfully submitted delete volume task."
+ assert message["task_id"] == "JID_456"
+
+    def test_perform_volume_deletion_failure_case_01(self, mocker, redfish_connection_mock_for_storage_volume):
+ f_module = self.get_module_mock(params={})
+ with pytest.raises(Exception) as exc:
+ self.module.perform_volume_deletion(f_module, redfish_connection_mock_for_storage_volume)
+ assert exc.value.args[0] == "'volume_id' option is a required property for deleting a volume."
+
+ def test_perform_volume_create_modify_success_case_01(self, mocker, storage_volume_base_uri,
+ redfish_connection_mock_for_storage_volume):
+ f_module = self.get_module_mock(params={"volume_id": "volume_id", "controller_id": "controller_id"})
+ message = {"msg": "Successfully submitted create volume task.", "task_uri": "JobService/Jobs",
+ "task_id": "JID_123"}
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_controller_id_exists', return_value=True)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.volume_payload', return_value={"payload": "value"})
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_storage_volume_action', return_value=message)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_mode_validation', return_value=None)
+ message = self.module.perform_volume_create_modify(f_module, redfish_connection_mock_for_storage_volume)
+ assert message["msg"] == "Successfully submitted create volume task."
+ assert message["task_id"] == "JID_123"
+
+ def test_perform_volume_create_modify_success_case_02(self, mocker, storage_volume_base_uri,
+ redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
+ f_module = self.get_module_mock(params={"volume_id": "volume_id"})
+ message = {"msg": "Successfully submitted modify volume task.", "task_uri": "JobService/Jobs",
+ "task_id": "JID_123"}
+ redfish_response_mock.success = True
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_volume_id_exists', return_value=redfish_response_mock)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.volume_payload', return_value={"payload": "value"})
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_storage_volume_action', return_value=message)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_mode_validation', return_value=None)
+ message = self.module.perform_volume_create_modify(f_module, redfish_connection_mock_for_storage_volume)
+ assert message["msg"] == "Successfully submitted modify volume task."
+ assert message["task_id"] == "JID_123"
+
+ def test_perform_volume_create_modify_failure_case_01(self, mocker, storage_volume_base_uri,
+ redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
+ f_module = self.get_module_mock(params={"volume_id": "volume_id"})
+ message = {"msg": "Successfully submitted modify volume task.", "task_uri": "JobService/Jobs",
+ "task_id": "JID_123"}
+ redfish_response_mock.success = True
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_volume_id_exists', return_value=redfish_response_mock)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.volume_payload', return_value={})
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_storage_volume_action', return_value=message)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_mode_validation', return_value=None)
+ with pytest.raises(Exception) as exc:
+ self.module.perform_volume_create_modify(f_module, redfish_connection_mock_for_storage_volume)
+ assert exc.value.args[0] == "Input options are not provided for the modify volume task."
+
+ def test_perform_storage_volume_action_success_case(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume):
+ redfish_response_mock.headers.update({"Location": "JobService/Jobs/JID_123"})
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.get_success_message', return_value="message")
+ msg = self.module.perform_storage_volume_action("POST", "uri", redfish_connection_mock_for_storage_volume,
+ "create", payload={"payload": "value"})
+ assert msg == "message"
+
+ def test_perform_storage_volume_action_exception_case(self, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume):
+ redfish_response_mock.headers.update({"Location": "JobService/Jobs/JID_123"})
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError('http://testhost.com', 400,
+ '', {}, None)
+ with pytest.raises(HTTPError) as ex:
+ self.module.perform_storage_volume_action("POST", "uri", redfish_connection_mock_for_storage_volume,
+ "create", payload={"payload": "value"})
+
+ def test_check_initialization_progress_case_01(self, mocker, redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
+ f_module = self.get_module_mock()
+ redfish_response_mock.success = False
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_volume_id_exists', return_value=redfish_response_mock)
+        operation_data = self.module.check_initialization_progress(f_module, redfish_connection_mock_for_storage_volume,
+ "volume_id")
+        assert operation_data == []
+
+ def test_check_initialization_progress_case_02(self, mocker, redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
+ f_module = self.get_module_mock()
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"Operations": "operation_value"}
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_volume_id_exists', return_value=redfish_response_mock)
+        operation_data = self.module.check_initialization_progress(f_module, redfish_connection_mock_for_storage_volume,
+ "volume_id")
+ assert opeartion_data == "operation_value"
+
+ def test_check_volume_id_exists(self, mocker, redfish_connection_mock_for_storage_volume, storage_volume_base_uri,
+ redfish_response_mock):
+ f_module = self.get_module_mock()
+ redfish_response_mock.status_code = 200
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_specified_identifier_exists_in_the_system',
+ return_value=redfish_response_mock)
+ resp = self.module.check_volume_id_exists(f_module, redfish_connection_mock_for_storage_volume, "volume_id")
+ assert resp.status_code == 200
+
+ def test_check_controller_id_exists_success_case_01(self, mocker, redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri,
+ redfish_response_mock):
+ f_module = self.get_module_mock(params={"controller_id": "controller_id"})
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"Drives": "drive1"}
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_specified_identifier_exists_in_the_system',
+ return_value=redfish_response_mock)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_physical_disk_exists',
+ return_value=True)
+ output = self.module.check_controller_id_exists(f_module, redfish_connection_mock_for_storage_volume)
+ assert output is True
+
+ def test_check_controller_id_exists_failure_case_01(self, mocker, redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri,
+ redfish_response_mock):
+ f_module = self.get_module_mock(params={"controller_id": "1234"})
+ redfish_response_mock.success = False
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_specified_identifier_exists_in_the_system',
+ return_value=redfish_response_mock)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_physical_disk_exists',
+ return_value=True)
+ with pytest.raises(Exception) as exc:
+ self.module.check_controller_id_exists(f_module, redfish_connection_mock_for_storage_volume)
+ assert exc.value.args[0] == "Failed to retrieve the details of the specified Controller Id 1234."
+
+ def test_check_specified_identifier_exists_in_the_system_success_case(self,
+ redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
+ f_module = self.get_module_mock(params={"controller_id": "1234"})
+ redfish_response_mock.status_code = True
+ redfish_response_mock.json_data = {"id": "data"}
+ resp = self.module.check_specified_identifier_exists_in_the_system(f_module,
+ redfish_connection_mock_for_storage_volume,
+ "uri",
+ "Specified Controller 123"
+ " does not exist in the System.")
+ assert resp.json_data == {"id": "data"}
+
+ def test_check_specified_identifier_exists_in_the_system_exception_case_01(self,
+ redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
+ f_module = self.get_module_mock(params={"controller_id": "1234"})
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError('http://testhost.com',
+ 404,
+ "Specified Controller 123 does"
+ " not exist in the System.",
+ {}, None)
+ with pytest.raises(Exception) as exc:
+ self.module.check_specified_identifier_exists_in_the_system(f_module,
+ redfish_connection_mock_for_storage_volume,
+ "uri",
+ "Specified Controller 123"
+ " does not exist in the System.")
+ assert exc.value.args[0] == "Specified Controller 123 does not exist in the System."
+
+ def test_check_specified_identifier_exists_in_the_system_exception_case_02(self,
+ redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
+ f_module = self.get_module_mock(params={"controller_id": "1234"})
+ msg = "http error"
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError('http://testhost.com', 400,
+ msg, {}, None)
+ with pytest.raises(Exception, match=msg) as exc:
+ self.module.check_specified_identifier_exists_in_the_system(f_module,
+ redfish_connection_mock_for_storage_volume,
+ "uri",
+ "Specified Controller 123 does not exist in the System.")
+
+ def test_check_specified_identifier_exists_in_the_system_exception_case_03(self,
+ redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
+ f_module = self.get_module_mock(params={"controller_id": "1234"})
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = URLError('test')
+ with pytest.raises(URLError) as exc:
+ self.module.check_specified_identifier_exists_in_the_system(f_module,
+ redfish_connection_mock_for_storage_volume,
+ "uri",
+ "Specified Controller"
+ " 123 does not exist in the System.")
+
+ def test_check_physical_disk_exists_success_case_01(self):
+ drive = [
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1/"
+ "Storage/Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
+ }
+ ]
+ f_module = self.get_module_mock(params={"controller_id": "RAID.Mezzanine.1C-1",
+ "drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"]})
+ val = self.module.check_physical_disk_exists(f_module, drive)
+ assert val is True
+
+ def test_check_physical_disk_exists_success_case_02(self):
+ drive = [
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1/Storage/"
+ "Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
+ }
+ ]
+ f_module = self.get_module_mock(params={"controller_id": "RAID.Mezzanine.1C-1", "drives": []})
+ val = self.module.check_physical_disk_exists(f_module, drive)
+ assert val is True
+
+ def test_check_physical_disk_exists_error_case_01(self):
+ drive = [
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1/"
+ "Storage/Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
+ }
+ ]
+ f_module = self.get_module_mock(params={"controller_id": "RAID.Mezzanine.1C-1", "drives": ["invalid_drive"]})
+ with pytest.raises(Exception) as exc:
+ self.module.check_physical_disk_exists(f_module, drive)
+ assert exc.value.args[0] == "Following Drive(s) invalid_drive are not attached to the specified" \
+ " Controller Id: RAID.Mezzanine.1C-1."
+
+ def test_check_physical_disk_exists_error_case_02(self):
+ drive = [
+ ]
+ f_module = self.get_module_mock(params={"controller_id": "RAID.Mezzanine.1C-1",
+ "drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"]})
+ with pytest.raises(Exception) as exc:
+ self.module.check_physical_disk_exists(f_module, drive)
+ assert exc.value.args[0] == "No Drive(s) are attached to the specified Controller Id: RAID.Mezzanine.1C-1."
+
+ def test_volume_payload_case_01(self, storage_volume_base_uri):
+ param = {
+ "drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"],
+ "capacity_bytes": 299439751168,
+ "block_size_bytes": 512,
+ "encryption_types": "NativeDriveEncryption",
+ "encrypted": True,
+ "volume_type": "NonRedundant",
+ "name": "VD1",
+ "optimum_io_size_bytes": 65536,
+ "oem": {"Dell": {"DellVirtualDisk": {"BusProtocol": "SAS", "Cachecade": "NonCachecadeVD",
+ "DiskCachePolicy": "Disabled",
+ "LockStatus": "Unlocked",
+ "MediaType": "HardDiskDrive",
+ "ReadCachePolicy": "NoReadAhead",
+ "SpanDepth": 1,
+ "SpanLength": 2,
+ "WriteCachePolicy": "WriteThrough"}}}}
+ f_module = self.get_module_mock(params=param)
+ payload = self.module.volume_payload(f_module)
+ assert payload["Drives"][0]["@odata.id"] == "/redfish/v1/Systems/System.Embedded.1/Storage/" \
+ "Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
+ assert payload["VolumeType"] == "NonRedundant"
+ assert payload["Name"] == "VD1"
+ assert payload["BlockSizeBytes"] == 512
+ assert payload["CapacityBytes"] == 299439751168
+ assert payload["OptimumIOSizeBytes"] == 65536
+ assert payload["Encrypted"] is True
+ assert payload["EncryptionTypes"] == ["NativeDriveEncryption"]
+ assert payload["Dell"]["DellVirtualDisk"]["ReadCachePolicy"] == "NoReadAhead"
+
+ def test_volume_payload_case_02(self):
+ param = {"block_size_bytes": 512,
+ "volume_type": "NonRedundant",
+ "name": "VD1",
+ "optimum_io_size_bytes": 65536}
+ f_module = self.get_module_mock(params=param)
+ payload = self.module.volume_payload(f_module)
+ assert payload["VolumeType"] == "NonRedundant"
+ assert payload["Name"] == "VD1"
+ assert payload["BlockSizeBytes"] == 512
+ assert payload["OptimumIOSizeBytes"] == 65536
+
+ def test_volume_payload_case_03(self, storage_volume_base_uri):
+ """Testing encrypted value in case value is passed false"""
+ param = {
+ "drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"],
+ "capacity_bytes": 299439751168,
+ "block_size_bytes": 512,
+ "encryption_types": "NativeDriveEncryption",
+ "encrypted": False,
+ "volume_type": "NonRedundant",
+ "name": "VD1",
+ "optimum_io_size_bytes": 65536,
+ "oem": {"Dell": {"DellVirtualDisk": {"BusProtocol": "SAS", "Cachecade": "NonCachecadeVD",
+ "DiskCachePolicy": "Disabled",
+ "LockStatus": "Unlocked",
+ "MediaType": "HardDiskDrive",
+ "ReadCachePolicy": "NoReadAhead",
+ "SpanDepth": 1,
+ "SpanLength": 2,
+ "WriteCachePolicy": "WriteThrough"}}}}
+ f_module = self.get_module_mock(params=param)
+ payload = self.module.volume_payload(f_module)
+ assert payload["Drives"][0]["@odata.id"] == "/redfish/v1/Systems/System.Embedded.1/" \
+ "Storage/Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
+ assert payload["VolumeType"] == "NonRedundant"
+ assert payload["Name"] == "VD1"
+ assert payload["BlockSizeBytes"] == 512
+ assert payload["CapacityBytes"] == 299439751168
+ assert payload["OptimumIOSizeBytes"] == 65536
+ assert payload["Encrypted"] is False
+ assert payload["EncryptionTypes"] == ["NativeDriveEncryption"]
+ assert payload["Dell"]["DellVirtualDisk"]["ReadCachePolicy"] == "NoReadAhead"
+
+ def test_fetch_storage_resource_success_case_01(self, redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
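+ """Validate that the storage base URI is populated from the first system member's Storage resource."""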
+ f_module = self.get_module_mock()
+ redfish_response_mock.json_data = {
+ "@odata.id": "/redfish/v1/Systems",
+ "Members": [
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1"
+ }
+ ],
+ "Storage": {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1/Storage"
+ },
+ }
+ redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
+ self.module.fetch_storage_resource(f_module, redfish_connection_mock_for_storage_volume)
+ assert self.module.storage_collection_map["storage_base_uri"] == "/redfish/v1/Systems/System.Embedded.1/Storage"
+
+ def test_fetch_storage_resource_error_case_01(self, redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
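+ """Validate the error raised when the system resource does not expose a Storage entry."""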
+ f_module = self.get_module_mock()
+ redfish_response_mock.json_data = {
+ "@odata.id": "/redfish/v1/Systems",
+ "Members": [
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1"
+ }
+ ],
+ }
+ redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_storage_resource(f_module, redfish_connection_mock_for_storage_volume)
+ assert exc.value.args[0] == "Target out-of-band controller does not support storage feature using Redfish API."
+
+ def test_fetch_storage_resource_error_case_02(self, redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
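+ """Validate the error raised when the Systems collection has no members."""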
+ f_module = self.get_module_mock()
+ redfish_response_mock.json_data = {
+ "@odata.id": "/redfish/v1/Systems",
+ "Members": [
+ ],
+ }
+ redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_storage_resource(f_module, redfish_connection_mock_for_storage_volume)
+ assert exc.value.args[0] == "Target out-of-band controller does not support storage feature using Redfish API."
+
+ def test_fetch_storage_resource_error_case_03(self, redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
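+ """Validate behaviour when the request fails with an HTTP 404 error."""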
+ f_module = self.get_module_mock()
+ msg = "Target out-of-band controller does not support storage feature using Redfish API."
+ redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError('http://testhost.com', 404,
+ json.dumps(msg), {}, None)
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_storage_resource(f_module, redfish_connection_mock_for_storage_volume)
+
+ def test_fetch_storage_resource_error_case_04(self, redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
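+ """Validate that an HTTP 400 error is propagated with its original message."""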
+ f_module = self.get_module_mock()
+ msg = "http error"
+ redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError('http://testhost.com', 400,
+ msg, {}, None)
+ with pytest.raises(Exception, match=msg) as exc:
+ self.module.fetch_storage_resource(f_module, redfish_connection_mock_for_storage_volume)
+
+ def test_fetch_storage_resource_error_case_05(self, redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
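+ """Validate that a URLError is propagated with its original message."""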
+ f_module = self.get_module_mock()
+ msg = "connection error"
+ redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = URLError(msg)
+ with pytest.raises(Exception, match=msg) as exc:
+ self.module.fetch_storage_resource(f_module, redfish_connection_mock_for_storage_volume)
+
+ def test_check_mode_validation(self, redfish_connection_mock_for_storage_volume,
+ redfish_response_mock, storage_volume_base_uri):
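+ """Validate check mode messages for volume creation: changes are reported when no matching volume exists, and no changes when an identical volume already exists."""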
+ param = {"drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Integrated.1-1"],
+ "capacity_bytes": 214748364800, "block_size_bytes": 512, "encryption_types": "NativeDriveEncryption",
+ "encrypted": False, "volume_type": "NonRedundant", "optimum_io_size_bytes": 65536}
+ f_module = self.get_module_mock(params=param)
+ f_module.check_mode = True
+ with pytest.raises(Exception) as exc:
+ self.module.check_mode_validation(
+ f_module, redfish_connection_mock_for_storage_volume, "create",
+ "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.Integrated.1-1/Volumes/")
+ assert exc.value.args[0] == "Changes found to be applied."
+ redfish_response_mock.json_data = {"Members@odata.count": 0}
+ with pytest.raises(Exception) as exc:
+ self.module.check_mode_validation(
+ f_module, redfish_connection_mock_for_storage_volume, "create",
+ "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.Integrated.1-1/Volumes/")
+ assert exc.value.args[0] == "Changes found to be applied."
+ redfish_response_mock.json_data = {
+ "Members@odata.count": 1, "Id": "Disk.Virtual.0:RAID.Integrated.1-1",
+ "Members": [{"@odata.id": "/redfish/v1/Systems/System.Embedded.1/Storage/"
+ "RAID.Integrated.1-1/Volumes/Disk.Virtual.0:RAID.Integrated.1-1"}],
+ "Name": "VD0", "BlockSizeBytes": 512, "CapacityBytes": 214748364800, "Encrypted": False,
+ "EncryptionTypes": ["NativeDriveEncryption"], "OptimumIOSizeBytes": 65536, "VolumeType": "NonRedundant",
+ "Links": {"Drives": [{"@odata.id": "Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Integrated.1-1"}]}}
+ param.update({"name": "VD0"})
+ f_module = self.get_module_mock(params=param)
+ f_module.check_mode = True
+ with pytest.raises(Exception) as exc:
+ self.module.check_mode_validation(
+ f_module, redfish_connection_mock_for_storage_volume, "create",
+ "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.Integrated.1-1/Volumes/")
+ assert exc.value.args[0] == "No changes found to be applied."