Diffstat (limited to 'ansible_collections/dellemc/openmanage/tests')
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/README.md | 18
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/config.yml | 2
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.10.txt | 3
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.11.txt | 3
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.12.txt | 3
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.9.txt | 7
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_idrac_redfish.py | 345
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py | 389
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_redfish.py | 171
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/common.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/conftest.py | 10
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py | 133
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py | 128
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_firmware_inventory.py | 108
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_system_inventory.py | 75
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py | 122
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py | 10
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py | 67
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_attributes.py | 325
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_bios.py | 9
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py | 174
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py | 231
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware.py | 814
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware_info.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_license.py | 746
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py | 12
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py | 91
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py | 10
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network.py | 24
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network_attributes.py | 1011
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_os_deployment.py | 13
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py | 742
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py | 10
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_server_config_profile.py | 499
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_syslog.py | 83
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_system_info.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_timezone_ntp.py | 59
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py | 174
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user_info.py | 231
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_virtual_media.py | 70
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_active_directory.py | 20
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies.py | 1578
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_actions_info.py | 93
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_category_info.py | 2670
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_info.py | 121
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_message_id_info.py | 84
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_smtp.py | 13
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_syslog.py | 50
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py | 22
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py | 12
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_address.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_proxy.py | 22
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_settings.py | 13
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_time.py | 58
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_webserver.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_security_settings.py | 9
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_chassis_slots.py | 10
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_baseline.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_info.py | 112
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_group.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_info.py | 25
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py | 267
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py | 217
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_network_services.py | 38
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py | 245
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py | 175
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py | 4
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_diagnostics.py | 28
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_discovery.py | 43
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_domain_user_groups.py | 56
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware.py | 20
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline.py | 16
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py | 22
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_info.py | 10
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_catalog.py | 30
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_groups.py | 12
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_identity_pool.py | 34
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_job_info.py | 23
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_port_breakout.py | 11
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan_info.py | 18
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_powerstate.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_profile.py | 28
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_profile_info.py | 1279
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profile_info.py | 122
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profiles.py | 44
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_info.py | 324
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_uplink.py | 6
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_uplink_info.py | 1155
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template.py | 294
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_identity_pool.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_info.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_network_vlan.py | 6
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_network_vlan_info.py | 346
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user.py | 11
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user_info.py | 8
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_event_subscription.py | 60
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_firmware.py | 83
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_firmware_rollback.py | 299
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_powerstate.py | 15
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py | 571
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/utils.py | 49
-rw-r--r--  ansible_collections/dellemc/openmanage/tests/unit/requirements.txt (renamed from ansible_collections/dellemc/openmanage/tests/requirements.txt) | 2
106 files changed, 15707 insertions, 2210 deletions
diff --git a/ansible_collections/dellemc/openmanage/tests/README.md b/ansible_collections/dellemc/openmanage/tests/README.md
index f66cdd59d..a7d90ff01 100644
--- a/ansible_collections/dellemc/openmanage/tests/README.md
+++ b/ansible_collections/dellemc/openmanage/tests/README.md
@@ -1,5 +1,5 @@
### Overview
-Dell EMC OpenManage Ansible Modules unit test scripts are located under
+Dell OpenManage Ansible Modules unit test scripts are located under
the [unit](./tests/unit) directory.
### Implementing the unit tests
@@ -10,27 +10,15 @@ Any contribution must have an associated unit test. This section covers the
addition to the tested module name. For example: test_ome_user
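For illustration only, a new test module following this naming convention might start from a skeleton like the sketch below. The module name, mocked connection, and response fields are assumptions for the example (modelled on the pytest/MagicMock patterns used in the tests added by this change), not the collection's documented API:
```python
# Hypothetical skeleton for tests/unit/plugins/modules/test_ome_user.py
import pytest
from mock import MagicMock


class TestOmeUser(object):

    @pytest.fixture
    def connection_mock(self):
        # Stand-in for the OME REST connection; no live appliance is contacted.
        connection = MagicMock()
        response = MagicMock()
        response.status_code = 200
        response.json_data = {"value": []}
        connection.invoke_request.return_value = response
        return connection

    def test_account_listing_is_empty(self, connection_mock):
        # A real test would drive the ome_user module; this only shows the shape.
        resp = connection_mock.invoke_request("AccountService/Accounts", "GET")
        assert resp.status_code == 200
        assert resp.json_data == {"value": []}
```
Shared fixtures and helpers used by the real tests live under `tests/unit/plugins/modules/conftest.py` and `common.py` (see the diffstat above).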
### Prerequisites
-* Dell EMC OpenManage collections - to install run `ansible-galaxy collection
+* Dell OpenManage collections - to install run `ansible-galaxy collection
install dellemc.openmanage`
* To run the unit tests for iDRAC modules, install the OpenManage Python Software Development Kit (OMSDK) using
-`pip install omsdk --upgrade` or from [Dell EMC OpenManage Python SDK](https://github.com/dell/omsdk)
+`pip install omsdk --upgrade` or from [Dell OpenManage Python SDK](https://github.com/dell/omsdk)
### Executing unit tests
You can run them manually using any tool of your choice, such as `pytest` or `ansible-test`.
#### Executing with `ansible-test`
-* Clone [Ansible repository](https://github.com/ansible/ansible) from GitHub to local $ANSIBLE_DIR.
-* Copy `compat` directory from the cloned repository path.
- `$ANSIBLE_DIR/test/units/` to the location of the installed Dell EMC OpenManage collection `$ANSIBLE_COLLECTIONS_PATHS/ansible_collections/dellemc/openmanage/tests/unit`.
-* Copy `utils.py` file from `$ANSIBLE_DIR/test/units/modules` tests location to the location of the installed collection `$ANSIBLE_COLLECTIONS_PATHS/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules`
-* Edit the copied `utils.py` to refer the above `compat` package as below:
-```python
- from units.compat import unittest
-
- # Replace the above lines in utils.py as below
-
- from ansible_collections.dellemc.openmanage.tests.unit.compat import unittest
-```
* To install the `ansible-test` requirements, run
```
ansible-test units --requirements
```
diff --git a/ansible_collections/dellemc/openmanage/tests/config.yml b/ansible_collections/dellemc/openmanage/tests/config.yml
new file mode 100644
index 000000000..22131f4f5
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/config.yml
@@ -0,0 +1,2 @@
+modules:
+ python_requires: '>=3.9.6'
diff --git a/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.10.txt b/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.10.txt
deleted file mode 100644
index f6fec0eb5..000000000
--- a/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.10.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-tests/unit/plugins/modules/test_ome_server_interface_profiles.py compile-2.6!skip
-plugins/modules/idrac_attributes.py compile-2.6!skip
-plugins/modules/idrac_attributes.py import-2.6!skip
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.11.txt b/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.11.txt
deleted file mode 100644
index f6fec0eb5..000000000
--- a/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.11.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-tests/unit/plugins/modules/test_ome_server_interface_profiles.py compile-2.6!skip
-plugins/modules/idrac_attributes.py compile-2.6!skip
-plugins/modules/idrac_attributes.py import-2.6!skip
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.12.txt b/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.12.txt
deleted file mode 100644
index f6fec0eb5..000000000
--- a/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.12.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-tests/unit/plugins/modules/test_ome_server_interface_profiles.py compile-2.6!skip
-plugins/modules/idrac_attributes.py compile-2.6!skip
-plugins/modules/idrac_attributes.py import-2.6!skip
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.9.txt b/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.9.txt
deleted file mode 100644
index 9d8f3ba14..000000000
--- a/ansible_collections/dellemc/openmanage/tests/sanity/ignore-2.9.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-plugins/modules/dellemc_get_firmware_inventory.py validate-modules:deprecation-mismatch
-plugins/modules/dellemc_get_firmware_inventory.py validate-modules:invalid-documentation
-plugins/modules/dellemc_get_system_inventory.py validate-modules:deprecation-mismatch
-plugins/modules/dellemc_get_system_inventory.py validate-modules:invalid-documentation
-tests/unit/plugins/modules/test_ome_server_interface_profiles.py compile-2.6!skip
-plugins/modules/idrac_attributes.py compile-2.6!skip
-plugins/modules/idrac_attributes.py import-2.6!skip
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_idrac_redfish.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_idrac_redfish.py
new file mode 100644
index 000000000..fc3b3543d
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_idrac_redfish.py
@@ -0,0 +1,345 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 8.3.0
+# Copyright (C) 2023 Dell Inc.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# All rights reserved. Dell, EMC, and other trademarks are trademarks of Dell Inc. or its subsidiaries.
+# Other trademarks may be trademarks of their respective owners.
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, OpenURLResponse
+from mock import MagicMock
+import json
+import os
+
+MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.'
+OPEN_URL = 'idrac_redfish.open_url'
+TEST_PATH = "/testpath"
+INVOKE_REQUEST = 'idrac_redfish.iDRACRedfishAPI.invoke_request'
+JOB_COMPLETE = 'idrac_redfish.iDRACRedfishAPI.wait_for_job_complete'
+API_TASK = '/api/tasks'
+SLEEP_TIME = 'idrac_redfish.time.sleep'
+
+
+class TestIdracRedfishRest(object):
+
+ @pytest.fixture
+ def mock_response(self):
+ mock_response = MagicMock()
+ mock_response.getcode.return_value = 200
+ mock_response.headers = mock_response.getheaders.return_value = {
+ 'X-Auth-Token': 'token_id'}
+ mock_response.read.return_value = json.dumps({"value": "data"})
+ return mock_response
+
+ @pytest.fixture
+ def module_params(self):
+ module_parameters = {'idrac_ip': '192.168.0.1', 'idrac_user': 'username',
+ 'idrac_password': 'password', 'idrac_port': '443'}
+ return module_parameters
+
+ @pytest.fixture
+ def idrac_redfish_object(self, module_params):
+ idrac_redfish_obj = iDRACRedfishAPI(module_params)
+ return idrac_redfish_obj
+
+ def test_invoke_request_with_session(self, mock_response, mocker, module_params):
+ mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ return_value=mock_response)
+ req_session = True
+ with iDRACRedfishAPI(module_params, req_session) as obj:
+ response = obj.invoke_request(TEST_PATH, "GET")
+ assert response.status_code == 200
+ assert response.json_data == {"value": "data"}
+ assert response.success is True
+
+ def test_invoke_request_without_session(self, mock_response, mocker):
+ mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ return_value=mock_response)
+ module_params = {'idrac_ip': '2001:db8:3333:4444:5555:6666:7777:8888', 'idrac_user': 'username',
+ 'idrac_password': 'password', "idrac_port": '443'}
+ req_session = False
+ with iDRACRedfishAPI(module_params, req_session) as obj:
+ response = obj.invoke_request(TEST_PATH, "GET")
+ assert response.status_code == 200
+ assert response.json_data == {"value": "data"}
+ assert response.success is True
+
+ def test_invoke_request_without_session_with_header(self, mock_response, mocker, module_params):
+ mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ return_value=mock_response)
+ req_session = False
+ with iDRACRedfishAPI(module_params, req_session) as obj:
+ response = obj.invoke_request(TEST_PATH, "POST", headers={
+ "application": "octstream"})
+ assert response.status_code == 200
+ assert response.json_data == {"value": "data"}
+ assert response.success is True
+
+ def test_invoke_request_with_session_connection_error(self, mocker, mock_response, module_params):
+ mock_response.success = False
+ mock_response.status_code = 500
+ mock_response.json_data = {}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ req_session = True
+ with pytest.raises(ConnectionError):
+ with iDRACRedfishAPI(module_params, req_session) as obj:
+ obj.invoke_request(TEST_PATH, "GET")
+
+ @pytest.mark.parametrize("exc", [URLError, SSLValidationError, ConnectionError])
+ def test_invoke_request_error_case_handling(self, exc, mock_response, mocker, module_params):
+ mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ side_effect=exc("test"))
+ req_session = False
+ with pytest.raises(exc):
+ with iDRACRedfishAPI(module_params, req_session) as obj:
+ obj.invoke_request(TEST_PATH, "GET")
+
+ def test_invoke_request_http_error_handling(self, mock_response, mocker, module_params):
+ open_url_mock = mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ return_value=mock_response)
+ open_url_mock.side_effect = HTTPError('https://testhost.com/', 400,
+ 'Bad Request Error', {}, None)
+ req_session = False
+ with pytest.raises(HTTPError):
+ with iDRACRedfishAPI(module_params, req_session) as obj:
+ obj.invoke_request(TEST_PATH, "GET")
+
+ @pytest.mark.parametrize("query_params", [
+ {"inp": {"$filter": "UserName eq 'admin'"},
+ "out": "%24filter=UserName+eq+%27admin%27"},
+ {"inp": {"$top": 1, "$skip": 2, "$filter": "JobType/Id eq 8"}, "out":
+ "%24top=1&%24skip=2&%24filter=JobType%2FId+eq+8"},
+ {"inp": {"$top": 1, "$skip": 3}, "out": "%24top=1&%24skip=3"}
+ ])
+ def test_build_url(self, query_params, mocker, idrac_redfish_object):
+ """builds complete url"""
+ base_uri = 'https://192.168.0.1:443/api'
+ path = "/AccountService/Accounts"
+ mocker.patch(MODULE_UTIL_PATH + 'idrac_redfish.iDRACRedfishAPI._get_url',
+ return_value=base_uri + path)
+ inp = query_params["inp"]
+ out = query_params["out"]
+ url = idrac_redfish_object._build_url(
+ path, query_param=inp)
+ assert url == base_uri + path + "?" + out
+
+ def test_build_url_none(self, mocker, idrac_redfish_object):
+ """builds complete url"""
+ base_uri = 'https://192.168.0.1:443/api'
+ mocker.patch(MODULE_UTIL_PATH + 'redfish.Redfish._get_base_url',
+ return_value=base_uri)
+ url = idrac_redfish_object._build_url("", None)
+ assert url == ""
+
+ def test_invalid_json_openurlresp(self):
+ obj = OpenURLResponse({})
+ obj.body = 'invalid json'
+ with pytest.raises(ValueError) as e:
+ obj.json_data
+ assert e.value.args[0] == "Unable to parse json"
+
+ def test_reason(self):
+ def mock_read():
+ return "{}"
+
+ obj = MagicMock()
+ obj.reason = "returning reason"
+ obj.read = mock_read
+ ourl = OpenURLResponse(obj)
+ reason_ret = ourl.reason
+ assert reason_ret == "returning reason"
+
+ @pytest.mark.parametrize("task_inp", [{"job_wait": True, "job_status": {"TaskState": "Completed"}}])
+ def test_wait_for_job_complete(self, mocker, mock_response, task_inp, idrac_redfish_object):
+ mock_response.json_data = task_inp.get("job_status")
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ mocker.patch(MODULE_UTIL_PATH + SLEEP_TIME,
+ return_value=None)
+ ret_resp = idrac_redfish_object.wait_for_job_complete(
+ API_TASK, task_inp.get("job_wait"))
+ assert ret_resp.json_data == mock_response.json_data
+
+ def test_wait_for_job_complete_false(self, mocker, mock_response, idrac_redfish_object):
+ mock_response.json_data = {"TaskState": "Completed"}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ mocker.patch(MODULE_UTIL_PATH + SLEEP_TIME,
+ return_value=None)
+ ret_resp = idrac_redfish_object.wait_for_job_complete(API_TASK, False)
+ assert ret_resp is None
+
+ def test_wait_for_job_complete_value_error(self, mocker, mock_response, module_params):
+ mock_response.json_data = {"TaskState": "Completed"}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ side_effect=ValueError("test"))
+ with pytest.raises(ValueError):
+ with iDRACRedfishAPI(module_params, True) as obj:
+ obj.wait_for_job_complete(API_TASK, True)
+
+ @pytest.mark.parametrize("inp_data", [
+ {
+ "j_data": {"PercentComplete": 100, "JobState": "Completed"},
+ "job_wait": True,
+ "reboot": True,
+ "apply_update": True
+ },
+ {
+ "j_data": {"PercentComplete": 0, "JobState": "Starting"},
+ "job_wait": True,
+ "reboot": False,
+ "apply_update": True
+ },
+ {
+ "j_data": {"PercentComplete": 0, "JobState": "Starting"},
+ "job_wait": False,
+ "reboot": False,
+ "apply_update": True
+ },
+ ])
+ def test_wait_for_job_completion(self, mocker, mock_response, inp_data, idrac_redfish_object):
+ mock_response.json_data = inp_data.get("j_data")
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ mocker.patch(MODULE_UTIL_PATH + SLEEP_TIME,
+ return_value=None)
+ ret_resp = idrac_redfish_object.wait_for_job_completion(API_TASK, inp_data.get(
+ "job_wait"), inp_data.get("reboot"), inp_data.get("apply_update"))
+ assert ret_resp.json_data is mock_response.json_data
+
+ @pytest.mark.parametrize("share_inp", [
+ {"share_ip": "share_ip", "share_name": "share_name", "share_type": "share_type",
+ "file_name": "file_name", "username": "username", "password": "password",
+ "ignore_certificate_warning": "ignore_certificate_warning",
+ "proxy_support": "proxy_support", "proxy_type": "proxy_type",
+ "proxy_port": "proxy_port", "proxy_server": "proxy_server",
+ "proxy_username": "proxy_username", "proxy_password": "proxy_password"}, {}, None])
+ def test_export_scp(self, mocker, mock_response, share_inp, idrac_redfish_object):
+ mock_response.json_data = {"Status": "Completed"}
+ mock_response.status_code = 202
+ mock_response.headers = {"Location": API_TASK}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ mocker.patch(MODULE_UTIL_PATH + JOB_COMPLETE,
+ return_value={"Status": "Completed"})
+ job_wait = share_inp is not None
+ resp = idrac_redfish_object.export_scp("xml", "export_use",
+ "All", job_wait, share_inp)
+ if job_wait:
+ assert resp == {"Status": "Completed"}
+ else:
+ assert resp.json_data == {"Status": "Completed"}
+
+ @pytest.mark.parametrize("share_inp", [
+ {"share_ip": "share_ip", "share_name": "share_name", "share_type": "share_type",
+ "file_name": "file_name", "username": "username", "password": "password",
+ "ignore_certificate_warning": "ignore_certificate_warning",
+ "proxy_support": "proxy_support", "proxy_type": "proxy_type",
+ "proxy_port": "proxy_port", "proxy_server": "proxy_server",
+ "proxy_username": "proxy_username", "proxy_password": "proxy_password"}, {}, None])
+ def test_import_scp_share(self, mocker, mock_response, share_inp, idrac_redfish_object):
+ mock_response.json_data = {"Status": "Completed"}
+ mock_response.status_code = 202
+ mock_response.headers = {"Location": API_TASK}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ imp_buffer = "import_buffer"
+ if share_inp is not None:
+ imp_buffer = None
+ resp = idrac_redfish_object.import_scp_share(
+ "shutdown_type", "host_powerstate", True, "All", imp_buffer, share_inp)
+ assert resp.json_data == {"Status": "Completed"}
+
+ @pytest.mark.parametrize("share_inp", [
+ {"share_ip": "share_ip", "share_name": "share_name", "share_type": "share_type",
+ "file_name": "file_name", "username": "username", "password": "password",
+ "ignore_certificate_warning": "ignore_certificate_warning",
+ "proxy_support": "proxy_support", "proxy_type": "proxy_type",
+ "proxy_port": "proxy_port", "proxy_server": "proxy_server",
+ "proxy_username": "proxy_username", "proxy_password": "proxy_password"}, {}, None])
+ def test_import_preview(self, mocker, mock_response, share_inp, idrac_redfish_object):
+ mock_response.json_data = {"Status": "Completed"}
+ mock_response.status_code = 202
+ mock_response.headers = {"Location": API_TASK}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ mocker.patch(MODULE_UTIL_PATH + JOB_COMPLETE,
+ return_value={"Status": "Completed"})
+ job_wait = True
+ imp_buffer = "import_buffer"
+ if share_inp is not None:
+ imp_buffer = None
+ job_wait = False
+ resp = idrac_redfish_object.import_preview(
+ imp_buffer, "All", share_inp, job_wait)
+ if job_wait:
+ assert resp == {"Status": "Completed"}
+ else:
+ assert resp.json_data == {"Status": "Completed"}
+
+ @pytest.mark.parametrize("status_code", [202, 200])
+ def test_import_scp(self, mocker, mock_response, status_code, idrac_redfish_object):
+ mock_response.json_data = {"Status": "Completed"}
+ mock_response.status_code = status_code
+ mock_response.headers = {"Location": "/tasks/1"}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ mocker.patch(MODULE_UTIL_PATH + JOB_COMPLETE,
+ return_value=mock_response)
+ resp = idrac_redfish_object.import_scp("imp_buffer", "All", True)
+ assert resp.json_data == {"Status": "Completed"}
+
+ @pytest.mark.parametrize("status_code", [202, 200])
+ def test_import_preview_scp(self, mocker, mock_response, status_code, idrac_redfish_object):
+ mock_response.json_data = {"Status": "Completed"}
+ mock_response.status_code = status_code
+ mock_response.headers = {"Location": "/tasks/1"}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ mocker.patch(MODULE_UTIL_PATH + JOB_COMPLETE,
+ return_value=mock_response)
+ resp = idrac_redfish_object.import_preview_scp(
+ "imp_buffer", "All", True)
+ assert resp.json_data == {"Status": "Completed"}
+
+ def test_requests_ca_bundle_set(self, mocker, mock_response, idrac_redfish_object):
+ os.environ["REQUESTS_CA_BUNDLE"] = "/path/to/requests_ca_bundle.pem"
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ result = idrac_redfish_object._get_omam_ca_env()
+ assert result == "/path/to/requests_ca_bundle.pem"
+ del os.environ["REQUESTS_CA_BUNDLE"]
+
+ def test_curl_ca_bundle_set(self, mocker, mock_response, idrac_redfish_object):
+ os.environ["CURL_CA_BUNDLE"] = "/path/to/curl_ca_bundle.pem"
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ result = idrac_redfish_object._get_omam_ca_env()
+ assert result == "/path/to/curl_ca_bundle.pem"
+ del os.environ["CURL_CA_BUNDLE"]
+
+ def test_omam_ca_bundle_set(self, mocker, mock_response, idrac_redfish_object):
+ os.environ["OMAM_CA_BUNDLE"] = "/path/to/omam_ca_bundle.pem"
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ result = idrac_redfish_object._get_omam_ca_env()
+ assert result == "/path/to/omam_ca_bundle.pem"
+ del os.environ["OMAM_CA_BUNDLE"]
+
+ def test_no_env_variable_set(self, mocker, mock_response, idrac_redfish_object):
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ result = idrac_redfish_object._get_omam_ca_env()
+ assert result is None
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py
index fc0f0be53..93892a744 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
-# Copyright (C) 2019-2022 Dell Inc.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2019-2023 Dell Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# All rights reserved. Dell, EMC, and other trademarks are trademarks of Dell Inc. or its subsidiaries.
@@ -17,268 +17,403 @@ __metaclass__ = type
import pytest
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OpenURLResponse
from mock import MagicMock
import json
MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.'
+OME_OPENURL = 'ome.open_url'
+TEST_PATH = "/testpath"
+INVOKE_REQUEST = 'ome.RestOME.invoke_request'
+JOB_SUBMISSION = 'ome.RestOME.job_submission'
+DEVICE_API = "DeviceService/Devices"
+TEST_HOST = 'https://testhost.com/'
+BAD_REQUEST = 'Bad Request Error'
+ODATA_COUNT = "@odata.count"
+ODATA_TYPE = "@odata.type"
+DDEVICE_TYPE = "#DeviceService.DeviceType"
-class TestRestOME(object):
-
- @pytest.fixture
- def ome_response_mock(self, mocker):
- set_method_result = {'json_data': {}}
- response_class_mock = mocker.patch(
- MODULE_UTIL_PATH + 'ome.OpenURLResponse',
- return_value=set_method_result)
- response_class_mock.success = True
- response_class_mock.status_code = 200
- return response_class_mock
+class TestOMERest(object):
@pytest.fixture
def mock_response(self):
mock_response = MagicMock()
mock_response.getcode.return_value = 200
- mock_response.headers = mock_response.getheaders.return_value = {'X-Auth-Token': 'token_id'}
+ mock_response.headers = mock_response.getheaders.return_value = {
+ 'X-Auth-Token': 'token_id'}
mock_response.read.return_value = json.dumps({"value": "data"})
return mock_response
+ @pytest.fixture
+ def module_params(self):
+ module_parameters = {'hostname': '192.168.0.1', 'username': 'username',
+ 'password': 'password', "port": 443}
+ return module_parameters
+
+ @pytest.fixture
+ def ome_object(self, module_params):
+ ome_obj = RestOME(module_params=module_params)
+ return ome_obj
+
def test_invoke_request_with_session(self, mock_response, mocker):
- mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+
+ mocker.patch(MODULE_UTIL_PATH + OME_OPENURL,
return_value=mock_response)
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
+ module_params = {'hostname': '[2001:db8:3333:4444:5555:6666:7777:8888]', 'username': 'username',
'password': 'password', "port": 443}
req_session = True
with RestOME(module_params, req_session) as obj:
- response = obj.invoke_request("/testpath", "GET")
+
+ response = obj.invoke_request(TEST_PATH, "GET")
assert response.status_code == 200
assert response.json_data == {"value": "data"}
assert response.success is True
- def test_invoke_request_without_session(self, mock_response, mocker):
- mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+ def test_invoke_request_without_session(self, mock_response, mocker, module_params):
+ mocker.patch(MODULE_UTIL_PATH + OME_OPENURL,
return_value=mock_response)
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
req_session = False
with RestOME(module_params, req_session) as obj:
- response = obj.invoke_request("/testpath", "GET")
+ response = obj.invoke_request(TEST_PATH, "GET")
assert response.status_code == 200
assert response.json_data == {"value": "data"}
assert response.success is True
- def test_invoke_request_without_session_with_header(self, mock_response, mocker):
- mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+ def test_invoke_request_without_session_with_header(self, mock_response, mocker, module_params):
+ mocker.patch(MODULE_UTIL_PATH + OME_OPENURL,
return_value=mock_response)
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
req_session = False
with RestOME(module_params, req_session) as obj:
- response = obj.invoke_request("/testpath", "POST", headers={"application": "octstream"})
+ response = obj.invoke_request(TEST_PATH, "POST", headers={
+ "application": "octstream"})
assert response.status_code == 200
assert response.json_data == {"value": "data"}
assert response.success is True
- def test_invoke_request_with_session_connection_error(self, mocker, mock_response):
+ def test_invoke_request_with_session_connection_error(self, mocker, mock_response, module_params):
mock_response.success = False
mock_response.status_code = 500
mock_response.json_data = {}
- mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
return_value=mock_response)
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
req_session = True
with pytest.raises(ConnectionError):
with RestOME(module_params, req_session) as obj:
- obj.invoke_request("/testpath", "GET")
+ obj.invoke_request(TEST_PATH, "GET")
@pytest.mark.parametrize("exc", [URLError, SSLValidationError, ConnectionError])
- def test_invoke_request_error_case_handling(self, exc, mock_response, mocker):
- open_url_mock = mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+ def test_invoke_request_error_case_handling(self, exc, mock_response, mocker, module_params):
+ open_url_mock = mocker.patch(MODULE_UTIL_PATH + OME_OPENURL,
return_value=mock_response)
open_url_mock.side_effect = exc("test")
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
req_session = False
- with pytest.raises(exc) as e:
+ with pytest.raises(exc):
with RestOME(module_params, req_session) as obj:
- obj.invoke_request("/testpath", "GET")
+ obj.invoke_request(TEST_PATH, "GET")
- def test_invoke_request_http_error_handling(self, mock_response, mocker):
- open_url_mock = mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+ def test_invoke_request_http_error_handling(self, mock_response, mocker, module_params):
+ open_url_mock = mocker.patch(MODULE_UTIL_PATH + OME_OPENURL,
return_value=mock_response)
- open_url_mock.side_effect = HTTPError('http://testhost.com/', 400,
- 'Bad Request Error', {}, None)
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
+ open_url_mock.side_effect = HTTPError(TEST_HOST, 400,
+ BAD_REQUEST, {}, None)
req_session = False
- with pytest.raises(HTTPError) as e:
+ with pytest.raises(HTTPError):
with RestOME(module_params, req_session) as obj:
- obj.invoke_request("/testpath", "GET")
+ obj.invoke_request(TEST_PATH, "GET")
- def test_get_all_report_details(self, mock_response, mocker):
+ def test_get_all_report_details(self, mock_response, mocker, module_params):
mock_response.success = True
mock_response.status_code = 200
- mock_response.json_data = {"@odata.count": 50, "value": list(range(51))}
- mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ mock_response.json_data = {ODATA_COUNT: 53, "value": list(range(50))}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
return_value=mock_response)
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
with RestOME(module_params, True) as obj:
- reports = obj.get_all_report_details("DeviceService/Devices")
- assert reports == {"resp_obj": mock_response, "report_list": list(range(51))}
+ reports = obj.get_all_report_details(DEVICE_API)
+ assert reports == {"resp_obj": mock_response,
+ "report_list": list(range(50)) + (list(range(50)))}
- def test_get_report_list_error_case(self, mock_response, mocker):
- mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+ def test_get_report_list_error_case(self, mock_response, mocker, ome_object):
+ mocker.patch(MODULE_UTIL_PATH + OME_OPENURL,
return_value=mock_response)
- invoke_obj = mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
- side_effect=HTTPError('http://testhost.com/', 400, 'Bad Request Error', {}, None))
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
- with pytest.raises(HTTPError) as e:
- with RestOME(module_params, False) as obj:
- obj.get_all_report_details("DeviceService/Devices")
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ side_effect=HTTPError(TEST_HOST, 400, BAD_REQUEST, {}, None))
+ with pytest.raises(HTTPError):
+ ome_object.get_all_report_details(DEVICE_API)
@pytest.mark.parametrize("query_param", [
- {"inp": {"$filter": "UserName eq 'admin'"}, "out": "%24filter=UserName%20eq%20%27admin%27"},
+ {"inp": {"$filter": "UserName eq 'admin'"},
+ "out": "%24filter=UserName%20eq%20%27admin%27"},
{"inp": {"$top": 1, "$skip": 2, "$filter": "JobType/Id eq 8"}, "out":
"%24top=1&%24skip=2&%24filter=JobType%2FId%20eq%208"},
{"inp": {"$top": 1, "$skip": 3}, "out": "%24top=1&%24skip=3"}
])
- def test_build_url(self, query_param, mocker):
+ def test_build_url(self, query_param, mocker, module_params):
"""builds complete url"""
base_uri = 'https://192.168.0.1:443/api'
path = "AccountService/Accounts"
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME._get_base_url',
return_value=base_uri)
inp = query_param["inp"]
out = query_param["out"]
- url = RestOME(module_params=module_params)._build_url(path, query_param=inp)
+ url = RestOME(module_params=module_params)._build_url(
+ path, query_param=inp)
assert url == base_uri + "/" + path + "?" + out
assert "+" not in url
- def test_get_job_type_id(self, mock_response, mocker):
+ def test_get_job_type_id(self, mock_response, mocker, ome_object):
mock_response.success = True
mock_response.status_code = 200
- mock_response.json_data = {"@odata.count": 50, "value": [{"Name": "PowerChange", "Id": 11}]}
- mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ mock_response.json_data = {ODATA_COUNT: 50,
+ "value": [{"Name": "PowerChange", "Id": 11}]}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
return_value=mock_response)
jobtype_name = "PowerChange"
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
- with RestOME(module_params, True) as obj:
- job_id = obj.get_job_type_id(jobtype_name)
+ job_id = ome_object.get_job_type_id(jobtype_name)
assert job_id == 11
- def test_get_job_type_id_null_case(self, mock_response, mocker):
+ def test_get_job_type_id_null_case(self, mock_response, mocker, ome_object):
mock_response.success = True
mock_response.status_code = 200
- mock_response.json_data = {"@odata.count": 50, "value": [{"Name": "PowerChange", "Id": 11}]}
- mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ mock_response.json_data = {ODATA_COUNT: 50,
+ "value": [{"Name": "PowerChange", "Id": 11}]}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
return_value=mock_response)
jobtype_name = "FirmwareUpdate"
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
- with RestOME(module_params, True) as obj:
- job_id = obj.get_job_type_id(jobtype_name)
+ job_id = ome_object.get_job_type_id(jobtype_name)
assert job_id is None
- def test_get_device_id_from_service_tag_ome_case01(self, mocker, mock_response):
+ def test_get_device_id_from_service_tag_ome_case01(self, mocker, mock_response, ome_object):
mock_response.success = True
mock_response.status_code = 200
- mock_response.json_data = {"@odata.count": 1, "value": [{"Name": "xyz", "Id": 11}]}
- mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ mock_response.json_data = {ODATA_COUNT: 1,
+ "value": [{"Name": "xyz", "Id": 11}]}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
return_value=mock_response)
- ome_default_args = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
- with RestOME(ome_default_args, True) as obj:
- details = obj.get_device_id_from_service_tag("xyz")
+ details = ome_object.get_device_id_from_service_tag("xyz")
assert details["Id"] == 11
assert details["value"] == {"Name": "xyz", "Id": 11}
- def test_get_device_id_from_service_tag_ome_case02(self, mocker, mock_response):
+ def test_get_device_id_from_service_tag_ome_case02(self, mocker, mock_response, ome_object):
mock_response.success = True
mock_response.status_code = 200
- mock_response.json_data = {"@odata.count": 0, "value": []}
- mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ mock_response.json_data = {ODATA_COUNT: 0, "value": []}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
return_value=mock_response)
- ome_default_args = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
- with RestOME(ome_default_args, True) as obj:
- details = obj.get_device_id_from_service_tag("xyz")
+ details = ome_object.get_device_id_from_service_tag("xyz")
assert details["Id"] is None
assert details["value"] == {}
- def test_get_all_items_with_pagination(self, mock_response, mocker):
+ def test_get_all_items_with_pagination(self, mock_response, mocker, ome_object):
mock_response.success = True
mock_response.status_code = 200
- mock_response.json_data = {"@odata.count": 50, "value": list(range(51))}
- mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
- return_value=mock_response)
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
- with RestOME(module_params, True) as obj:
- reports = obj.get_all_items_with_pagination("DeviceService/Devices")
- assert reports == {"total_count": 50, "value": list(range(51))}
+ mock_response.json_data = {ODATA_COUNT: 100, "value": list(
+ range(50)), '@odata.nextLink': '/api/DeviceService/Devices2'}
- def test_get_all_items_with_pagination_error_case(self, mock_response, mocker):
- mocker.patch(MODULE_UTIL_PATH + 'ome.open_url',
+ mock_response_page2 = MagicMock()
+ mock_response_page2.success = True
+ mock_response_page2.status_code = 200
+ mock_response_page2.json_data = {
+ ODATA_COUNT: 100, "value": list(range(50, 100))}
+
+ def mock_invoke_request(*args, **kwargs):
+ if args[1] == DEVICE_API:
+ return mock_response
+ return mock_response_page2
+
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ side_effect=mock_invoke_request)
+ reports = ome_object.get_all_items_with_pagination(DEVICE_API)
+ assert reports == {"total_count": 100, "value": list(range(100))}
+
+ def test_get_all_items_with_pagination_error_case(self, mock_response, mocker, ome_object):
+ mocker.patch(MODULE_UTIL_PATH + OME_OPENURL,
return_value=mock_response)
- invoke_obj = mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
- side_effect=HTTPError('http://testhost.com/', 400, 'Bad Request Error', {}, None))
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
- with pytest.raises(HTTPError) as e:
- with RestOME(module_params, False) as obj:
- obj.get_all_items_with_pagination("DeviceService/Devices")
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ side_effect=HTTPError(TEST_HOST, 400, BAD_REQUEST, {}, None))
+ with pytest.raises(HTTPError):
+ ome_object.get_all_items_with_pagination(DEVICE_API)
- def test_get_device_type(self, mock_response, mocker):
+ def test_get_device_type(self, mock_response, mocker, ome_object):
mock_response.success = True
mock_response.status_code = 200
mock_response.json_data = {
"@odata.context": "/api/$metadata#Collection(DeviceService.DeviceType)",
- "@odata.count": 5,
+ ODATA_COUNT: 5,
"value": [
{
- "@odata.type": "#DeviceService.DeviceType",
+ ODATA_TYPE: DDEVICE_TYPE,
"DeviceType": 1000,
"Name": "SERVER",
"Description": "Server Device"
},
{
- "@odata.type": "#DeviceService.DeviceType",
+ ODATA_TYPE: DDEVICE_TYPE,
"DeviceType": 2000,
"Name": "CHASSIS",
"Description": "Chassis Device"
},
{
- "@odata.type": "#DeviceService.DeviceType",
+ ODATA_TYPE: DDEVICE_TYPE,
"DeviceType": 3000,
"Name": "STORAGE",
"Description": "Storage Device"
},
{
- "@odata.type": "#DeviceService.DeviceType",
+ ODATA_TYPE: DDEVICE_TYPE,
"DeviceType": 4000,
"Name": "NETWORK_IOM",
"Description": "NETWORK IO Module Device"
},
{
- "@odata.type": "#DeviceService.DeviceType",
+ ODATA_TYPE: DDEVICE_TYPE,
"DeviceType": 8000,
"Name": "STORAGE_IOM",
"Description": "Storage IOM Device"
}
]
}
- mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.invoke_request',
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
return_value=mock_response)
- module_params = {'hostname': '192.168.0.1', 'username': 'username',
- 'password': 'password', "port": 443}
- with RestOME(module_params, False) as obj:
- type_map = obj.get_device_type()
+ type_map = ome_object.get_device_type()
assert type_map == {1000: "SERVER", 2000: "CHASSIS", 3000: "STORAGE",
4000: "NETWORK_IOM", 8000: "STORAGE_IOM"}
+
+ def test_invalid_json_openurlresp(self):
+ obj = OpenURLResponse({})
+ obj.body = 'invalid json'
+ with pytest.raises(ValueError) as e:
+ obj.json_data
+ assert e.value.args[0] == "Unable to parse json"
+
+ @pytest.mark.parametrize("status_assert", [
+ {'id': 2060, 'exist_poll': True, 'job_failed': False,
+ 'message': "Job Completed successfully."},
+ {'id': 2070, 'exist_poll': True, 'job_failed': True,
+ 'message': "Job is in Failed state, and is not completed."},
+ {'id': 1234, 'exist_poll': False, 'job_failed': False, 'message': None}])
+ def test_get_job_info(self, mocker, mock_response, status_assert, ome_object):
+
+ mock_response.success = True
+ mock_response.status_code = 200
+ mock_response.json_data = {
+ 'LastRunStatus': {'Id': status_assert['id']}
+ }
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ exit_poll, job_failed, message = ome_object.get_job_info(12345)
+
+ assert exit_poll is status_assert['exist_poll']
+ assert job_failed is status_assert['job_failed']
+ assert message == status_assert['message']
+
+ def test_get_job_exception(self, mocker, module_params):
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ side_effect=HTTPError(TEST_HOST, 400,
+ BAD_REQUEST, {}, None))
+ with pytest.raises(HTTPError):
+ with RestOME(module_params, True) as obj:
+ obj.get_job_info(12345)
+
+ @pytest.mark.parametrize("ret_val", [
+ (True, False, "My Message"),
+ (False, True, "The job is not complete after 2 seconds.")])
+ def test_job_tracking(self, mocker, mock_response, ret_val, ome_object):
+ mocker.patch(MODULE_UTIL_PATH + 'ome.time.sleep',
+ return_value=())
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+
+ mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME.get_job_info',
+ return_value=ret_val)
+ job_failed, message = ome_object.job_tracking(12345, 2, 1)
+ assert job_failed is ret_val[1]
+ assert message == ret_val[2]
+
+ def test_strip_substr_dict(self, mocker, mock_response, ome_object):
+ data_dict = {"@odata.context": "/api/$metadata#Collection(DeviceService.DeviceType)",
+ ODATA_COUNT: 5,
+ "value": [
+ {
+ ODATA_TYPE: DDEVICE_TYPE,
+ "DeviceType": 1000,
+ "Name": "SERVER",
+ "Description": "Server Device"
+ },
+ {
+ ODATA_TYPE: DDEVICE_TYPE,
+ "DeviceType": 2000,
+ "Name": "CHASSIS",
+ "Description": "Chassis Device"
+ }
+ ]}
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ ret = ome_object.strip_substr_dict(data_dict)
+ assert ret == {'value': [{'@odata.type': '#DeviceService.DeviceType', 'Description': 'Server Device', 'DeviceType': 1000, 'Name': 'SERVER'}, {
+ '@odata.type': '#DeviceService.DeviceType', 'Description': 'Chassis Device', 'DeviceType': 2000, 'Name': 'CHASSIS'}]}
+
+ def test_job_submission(self, mocker, mock_response, ome_object):
+ mock_response.success = True
+ mock_response.status_code = 200
+ mock_response.json_data = {
+ 'JobStatus': "Completed"
+ }
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ ret = ome_object.job_submission(
+ "job_name", "job_desc", "targets", "params", "job_type")
+ assert ret.json_data == mock_response.json_data
+
+ def test_test_network_connection(self, mocker, mock_response, ome_object):
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ mock_response.success = True
+ mock_response.status_code = 200
+ mock_response.json_data = {
+ 'JobStatus': "Completed"
+ }
+ mocker.patch(MODULE_UTIL_PATH + JOB_SUBMISSION,
+ return_value=mock_response)
+ ret = ome_object.test_network_connection(
+ "share_address", "share_path", "share_type", "share_user", "share_password", "share_domain")
+ assert ret.json_data == mock_response.json_data
+
+ ret = ome_object.test_network_connection(
+ "share_address", "share_path", "share_type")
+ assert ret.json_data == mock_response.json_data
+
+ def test_check_existing_job_state(self, mocker, mock_response, ome_object):
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ mock_response.success = True
+ mock_response.status_code = 200
+ mock_response.json_data = {
+ 'value': [{"JobType": {"Name": "Job_Name_1"}}]
+ }
+ mocker.patch(MODULE_UTIL_PATH + JOB_SUBMISSION,
+ return_value=mock_response)
+ job_allowed, available_jobs = ome_object.check_existing_job_state(
+ "Job_Name_1")
+ assert job_allowed is False
+ assert available_jobs == {"JobType": {"Name": "Job_Name_1"}}
+
+ mock_response.json_data = {
+ 'value': []
+ }
+ mocker.patch(MODULE_UTIL_PATH + JOB_SUBMISSION,
+ return_value=mock_response)
+ job_allowed, available_jobs = ome_object.check_existing_job_state(
+ "Job_Name_1")
+ assert job_allowed is True
+ assert available_jobs == []
+
+ mock_response.json_data = {
+ 'value': [{"JobType": {"Name": "Job_Name_2"}}]
+ }
+ mocker.patch(MODULE_UTIL_PATH + JOB_SUBMISSION,
+ return_value=mock_response)
+ job_allowed, available_jobs = ome_object.check_existing_job_state(
+ "Job_Name_1")
+ assert job_allowed is True
+ assert available_jobs == [{'JobType': {'Name': 'Job_Name_2'}}]
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_redfish.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_redfish.py
new file mode 100644
index 000000000..2e092af15
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_redfish.py
@@ -0,0 +1,171 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 8.3.0
+# Copyright (C) 2023 Dell Inc.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# All rights reserved. Dell, EMC, and other trademarks are trademarks of Dell Inc. or its subsidiaries.
+# Other trademarks may be trademarks of their respective owners.
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, OpenURLResponse
+from mock import MagicMock
+import json
+
+MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.'
+OPEN_URL = 'redfish.open_url'
+TEST_PATH = "/testpath"
+
+
+class TestRedfishRest(object):
+
+ @pytest.fixture
+ def mock_response(self):
+ mock_response = MagicMock()
+ mock_response.getcode.return_value = 200
+ mock_response.headers = mock_response.getheaders.return_value = {
+ 'X-Auth-Token': 'token_id'}
+ mock_response.read.return_value = json.dumps({"value": "data"})
+ return mock_response
+
+ @pytest.fixture
+ def module_params(self):
+ module_parameters = {'baseuri': '192.168.0.1:443', 'username': 'username',
+ 'password': 'password'}
+ return module_parameters
+
+ @pytest.fixture
+ def redfish_object(self, module_params):
+ redfish_obj = Redfish(module_params=module_params)
+ return redfish_obj
+
+ def test_invoke_request_with_session(self, mock_response, mocker, module_params):
+ mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ return_value=mock_response)
+ req_session = True
+ with Redfish(module_params, req_session) as obj:
+ response = obj.invoke_request(TEST_PATH, "GET")
+ assert response.status_code == 200
+ assert response.json_data == {"value": "data"}
+ assert response.success is True
+
+ def test_invoke_request_without_session(self, mock_response, mocker):
+ mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ return_value=mock_response)
+ module_params = {'baseuri': '[2001:db8:3333:4444:5555:6666:7777:8888]:443', 'username': 'username',
+ 'password': 'password', "port": 443}
+ req_session = False
+ with Redfish(module_params, req_session) as obj:
+ response = obj.invoke_request(TEST_PATH, "GET")
+ assert response.status_code == 200
+ assert response.json_data == {"value": "data"}
+ assert response.success is True
+
+ def test_invoke_request_without_session_with_header(self, mock_response, mocker, module_params):
+ mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ return_value=mock_response)
+ req_session = False
+ with Redfish(module_params, req_session) as obj:
+ response = obj.invoke_request(TEST_PATH, "POST", headers={
+ "application": "octstream"})
+ assert response.status_code == 200
+ assert response.json_data == {"value": "data"}
+ assert response.success is True
+
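+    # With invoke_request patched to return a failed response, the
+    # session-based flow is expected to raise ConnectionError.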
+ def test_invoke_request_with_session_connection_error(self, mocker, mock_response, module_params):
+ mock_response.success = False
+ mock_response.status_code = 500
+ mock_response.json_data = {}
+ mocker.patch(MODULE_UTIL_PATH + 'redfish.Redfish.invoke_request',
+ return_value=mock_response)
+ req_session = True
+ with pytest.raises(ConnectionError):
+ with Redfish(module_params, req_session) as obj:
+ obj.invoke_request(TEST_PATH, "GET")
+
+ @pytest.mark.parametrize("exc", [URLError, SSLValidationError, ConnectionError])
+ def test_invoke_request_error_case_handling(self, exc, mock_response, mocker, module_params):
+ mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ side_effect=exc("test"))
+ req_session = False
+ with pytest.raises(exc):
+ with Redfish(module_params, req_session) as obj:
+ obj.invoke_request(TEST_PATH, "GET")
+
+ def test_invoke_request_http_error_handling(self, mock_response, mocker, module_params):
+ open_url_mock = mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ return_value=mock_response)
+ open_url_mock.side_effect = HTTPError('https://testhost.com/', 400,
+ 'Bad Request Error', {}, None)
+ req_session = False
+ with pytest.raises(HTTPError):
+ with Redfish(module_params, req_session) as obj:
+ obj.invoke_request(TEST_PATH, "GET")
+
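+    # _build_url should URL-encode OData query parameters such as $filter,
+    # $top, and $skip and append them to the base URI and resource path.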
+ @pytest.mark.parametrize("query_params", [
+ {"inp": {"$filter": "UserName eq 'admin'"},
+ "out": "%24filter=UserName+eq+%27admin%27"},
+ {"inp": {"$top": 1, "$skip": 2, "$filter": "JobType/Id eq 8"}, "out":
+ "%24top=1&%24skip=2&%24filter=JobType%2FId+eq+8"},
+ {"inp": {"$top": 1, "$skip": 3}, "out": "%24top=1&%24skip=3"}
+ ])
+ def test_build_url(self, query_params, mocker, redfish_object):
+ """builds complete url"""
+ base_uri = 'https://192.168.0.1:443/api'
+ path = "/AccountService/Accounts"
+ mocker.patch(MODULE_UTIL_PATH + 'redfish.Redfish._get_base_url',
+ return_value=base_uri)
+ inp = query_params["inp"]
+ out = query_params["out"]
+ url = redfish_object._build_url(
+ path, query_param=inp)
+ assert url == base_uri + path + "?" + out
+
+ def test_build_url_none(self, mocker, redfish_object):
+ """builds complete url"""
+ base_uri = 'https://192.168.0.1:443/api'
+ mocker.patch(MODULE_UTIL_PATH + 'redfish.Redfish._get_base_url',
+ return_value=base_uri)
+ url = redfish_object._build_url("", None)
+ assert url == ""
+
+ def test_strip_substr_dict(self, mocker, mock_response, redfish_object):
+ data_dict = {"@odata.context": "/api/$metadata#Collection(DeviceService.DeviceType)",
+ "@odata.count": 5,
+ "value": [
+ {
+ "@odata.type": "#DeviceService.DeviceType",
+ "DeviceType": 1000,
+ "Name": "SERVER",
+ "Description": "Server Device"
+ }
+ ]}
+ ret = redfish_object.strip_substr_dict(data_dict)
+ assert ret == {'value': [{'@odata.type': '#DeviceService.DeviceType',
+ 'Description': 'Server Device', 'DeviceType': 1000, 'Name': 'SERVER'}]}
+
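+    # A body that is not valid JSON should make the json_data property raise
+    # ValueError("Unable to parse json").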
+ def test_invalid_json_openurlresp(self):
+ obj = OpenURLResponse({})
+ obj.body = 'invalid json'
+ with pytest.raises(ValueError) as e:
+ obj.json_data
+ assert e.value.args[0] == "Unable to parse json"
+
+ def test_reason(self):
+ def mock_read():
+ return "{}"
+ obj = MagicMock()
+ obj.reason = "returning reason"
+ obj.read = mock_read
+ ourl = OpenURLResponse(obj)
+ reason_ret = ourl.reason
+ assert reason_ret == "returning reason"
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/common.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/common.py
index 0cc124f9b..ef7f8d4e3 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/common.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/common.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2019-2022 Dell Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -26,8 +26,8 @@ class Constants:
device_id2 = 4321
service_tag1 = "MXL1234"
service_tag2 = "MXL5467"
- hostname1 = "192.168.0.1"
- hostname2 = "192.168.0.2"
+ hostname1 = "XX.XX.XX.XX"
+ hostname2 = "YY.YY.YY.YY"
class AnsibleFailJSonException(Exception):
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/conftest.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/conftest.py
index e6f9ae46e..ff455d6d8 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/conftest.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/conftest.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2019-2022 Dell Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -16,7 +16,7 @@ __metaclass__ = type
import pytest
from ansible.module_utils import basic
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.utils import set_module_args, exit_json, \
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.utils import exit_json, \
fail_json, AnsibleFailJson, AnsibleExitJson
from mock import MagicMock
@@ -57,7 +57,7 @@ def redfish_response_mock(mocker):
@pytest.fixture
def ome_default_args():
- default_args = {'hostname': '192.168.0.1', 'username': 'username', 'password': 'password', "ca_path": "/path/ca_bundle"}
+ default_args = {'hostname': 'XX.XX.XX.XX', 'username': 'username', 'password': 'password', "ca_path": "/path/ca_bundle"}
return default_args
@@ -70,7 +70,7 @@ def idrac_default_args():
@pytest.fixture
def redfish_default_args():
- default_args = {'baseuri': '192.168.0.1', 'username': 'username', 'password': 'password',
+ default_args = {'baseuri': 'XX.XX.XX.XX', 'username': 'username', 'password': 'password',
"ca_path": "/path/to/ca_cert.pem"}
return default_args
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py
index 0386269ec..fb361a38e 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 6.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2020-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -14,13 +14,20 @@ __metaclass__ = type
import pytest
from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_configure_idrac_eventing
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock, PropertyMock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock, Mock, PropertyMock
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
from pytest import importorskip
+from ansible.module_utils._text import to_text
+import json
+from io import StringIO
importorskip("omsdk.sdkfile")
importorskip("omsdk.sdkcreds")
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
class TestConfigureEventing(FakeAnsibleModule):
module = dellemc_configure_idrac_eventing
@@ -90,7 +97,7 @@ class TestConfigureEventing(FakeAnsibleModule):
"snmp_v3_username": "snmpuser", "snmp_trap_state": "Enabled", "alert_number": 4,
"email_alert_state": "Enabled", "address": "abc@xyz", "custom_message": "test",
"enable_alerts": "Enabled", "authentication": "Enabled",
- "smtp_ip_address": "192.168.0.1", "smtp_port": 443, "username": "uname",
+ "smtp_ip_address": "XX.XX.XX.XX", "smtp_port": 443, "username": "uname",
"password": "pwd"})
message = {"changes_applicable": True, "message": "Changes found to commit!"}
idrac_connection_configure_eventing_mock.config_mgr.is_change_applicable.return_value = message
@@ -106,7 +113,7 @@ class TestConfigureEventing(FakeAnsibleModule):
"snmp_v3_username": "snmpuser", "snmp_trap_state": "Enabled", "alert_number": 4,
"email_alert_state": "Enabled", "address": "abc@xyz", "custom_message": "test",
"enable_alerts": "Enabled", "authentication": "Enabled",
- "smtp_ip_address": "192.168.0.1", "smtp_port": 443, "username": "uname",
+ "smtp_ip_address": "XX.XX.XX.XX", "smtp_port": 443, "username": "uname",
"password": "pwd"})
message = {"changes_applicable": True, "message": "changes found to commit!", "changed": True,
"Status": "Success"}
@@ -123,7 +130,7 @@ class TestConfigureEventing(FakeAnsibleModule):
"destination": "1.1.1.1", "snmp_v3_username": "snmpuser",
"snmp_trap_state": "Enabled", "alert_number": 4, "email_alert_state": "Enabled",
"address": "abc@xyz", "custom_message": "test", "enable_alerts": "Enabled",
- "authentication": "Enabled", "smtp_ip_address": "192.168.0.1", "smtp_port": 443,
+ "authentication": "Enabled", "smtp_ip_address": "XX.XX.XX.XX", "smtp_port": 443,
"username": "uname", "password": "pwd"})
message = {"changes_applicable": False, "Message": "No changes found to commit!", "changed": False,
"Status": "Success"}
@@ -140,7 +147,7 @@ class TestConfigureEventing(FakeAnsibleModule):
"snmp_v3_username": "snmpuser", "snmp_trap_state": "Enabled", "alert_number": 4,
"email_alert_state": "Enabled", "address": "abc@xyz", "custom_message": "test",
"enable_alerts": "Enabled", "authentication": "Enabled",
- "smtp_ip_address": "192.168.0.1", "smtp_port": 443, "username": "uname",
+ "smtp_ip_address": "XX.XX.XX.XX", "smtp_port": 443, "username": "uname",
"password": "pwd"})
message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
"Status": "Success"}
@@ -180,7 +187,7 @@ class TestConfigureEventing(FakeAnsibleModule):
"snmp_v3_username": "snmpuser", "snmp_trap_state": "Enabled", "alert_number": 4,
"email_alert_state": "Enabled", "address": "abc@xyz", "custom_message": "test",
"enable_alerts": "Enabled", "authentication": "Enabled",
- "smtp_ip_address": "192.168.0.1", "smtp_port": 443, "username": "uname",
+ "smtp_ip_address": "XX.XX.XX.XX", "smtp_port": 443, "username": "uname",
"password": "pwd"})
message = {'Status': 'Failed', "Data": {'Message': 'status failed in checking Data'}}
idrac_connection_configure_eventing_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
@@ -197,7 +204,7 @@ class TestConfigureEventing(FakeAnsibleModule):
"snmp_v3_username": "snmpuser", "snmp_trap_state": "Enabled", "alert_number": 4,
"email_alert_state": "Enabled", "address": "abc@xyz", "custom_message": "test",
"enable_alerts": "Enabled", "authentication": "Enabled",
- "smtp_ip_address": "192.168.0.1", "smtp_port": 443, "username": "uname",
+ "smtp_ip_address": "XX.XX.XX.XX", "smtp_port": 443, "username": "uname",
"password": "pwd"})
message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
"Status": "failed"}
@@ -214,7 +221,7 @@ class TestConfigureEventing(FakeAnsibleModule):
"destination": "1.1.1.1", "snmp_v3_username": "snmpuser",
"snmp_trap_state": "Enabled", "alert_number": 4, "email_alert_state": "Enabled",
"address": "abc@xyz", "custom_message": "test", "enable_alerts": "Enabled",
- "authentication": "Enabled", "smtp_ip_address": "192.168.0.1",
+ "authentication": "Enabled", "smtp_ip_address": "XX.XX.XX.XX",
"smtp_port": 443, "username": "uname", "password": "pwd"})
message = {'Status': 'Failed', "Data": {'Message': "Failed to found changes"}}
idrac_connection_configure_eventing_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
@@ -224,14 +231,98 @@ class TestConfigureEventing(FakeAnsibleModule):
self.module.run_idrac_eventing_config(idrac_connection_configure_eventing_mock, f_module)
assert ex.value.args[0] == 'Failed to found changes'
- @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
- def test_main_configure_eventing_exception_handling_case(self, exc_type, mocker, idrac_default_args,
- idrac_connection_configure_eventing_mock,
- idrac_file_manager_config_eventing_mock):
- idrac_default_args.update({"share_name": None, 'share_password': None,
- 'share_mnt': None, 'share_user': None})
- mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
- 'dellemc_configure_idrac_eventing.run_idrac_eventing_config', side_effect=exc_type('test'))
- result = self._run_module_with_fail_json(idrac_default_args)
+ @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError, HTTPError, URLError, SSLValidationError, ConnectionError])
+ def test_main_dellemc_configure_idrac_eventing_handling_case(self, exc_type, idrac_connection_configure_eventing_mock,
+ idrac_file_manager_config_eventing_mock, idrac_default_args,
+ is_changes_applicable_eventing_mock, mocker):
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH +
+ 'dellemc_configure_idrac_eventing.run_idrac_eventing_config',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH +
+ 'dellemc_configure_idrac_eventing.run_idrac_eventing_config',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if exc_type != URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
assert 'msg' in result
+
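+    # A share object reporting IsValid as False should surface the standard
+    # share-access error message.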
+ def test_run_run_idrac_eventing_config_invalid_share(self, idrac_connection_configure_eventing_mock,
+ idrac_file_manager_config_eventing_mock, idrac_default_args,
+ is_changes_applicable_eventing_mock, mocker):
+ f_module = self.get_module_mock(params=idrac_default_args)
+ obj = MagicMock()
+ obj.IsValid = False
+ mocker.patch(
+ MODULE_PATH + "dellemc_configure_idrac_eventing.file_share_manager.create_share_obj", return_value=(obj))
+ with pytest.raises(Exception) as exc:
+ self.module.run_idrac_eventing_config(
+ idrac_connection_configure_eventing_mock, f_module)
+ assert exc.value.args[0] == "Unable to access the share. Ensure that the share name, share mount, and share credentials provided are correct."
+
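+    # A failed liaison-share status without a "Data" key should fall back to
+    # the top-level "Message" value in the raised error.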
+ def test_run_idrac_eventing_config_Error(self, idrac_connection_configure_eventing_mock,
+ idrac_file_manager_config_eventing_mock, idrac_default_args,
+ is_changes_applicable_eventing_mock, mocker):
+ f_module = self.get_module_mock(params=idrac_default_args)
+ obj = MagicMock()
+ obj.IsValid = True
+ mocker.patch(
+ MODULE_PATH + "dellemc_configure_idrac_eventing.file_share_manager.create_share_obj", return_value=(obj))
+ message = {'Status': 'Failed', 'Message': 'Key Error Expected', "Data1": {
+ 'Message': 'Status failed in checking data'}}
+ idrac_connection_configure_eventing_mock.config_mgr.set_liason_share.return_value = message
+ idrac_connection_configure_eventing_mock.config_mgr.apply_changes.return_value = "Returned on Key Error"
+ with pytest.raises(Exception) as exc:
+ self.module.run_idrac_eventing_config(
+ idrac_connection_configure_eventing_mock, f_module)
+ assert exc.value.args[0] == "Key Error Expected"
+
+ def test_dellemc_configure_idrac_eventing_main_cases(self, idrac_connection_configure_eventing_mock,
+ idrac_file_manager_config_eventing_mock, idrac_default_args,
+ is_changes_applicable_eventing_mock, mocker):
+ status_msg = {"Status": "Success", "Message": "No changes found"}
+ mocker.patch(MODULE_PATH +
+ 'dellemc_configure_idrac_eventing.run_idrac_eventing_config', return_value=status_msg)
+ result = self._run_module(idrac_default_args)
+ assert result['changed'] is True
+ assert result['msg'] == "Successfully configured the iDRAC eventing settings."
+ assert result['eventing_status'].get("Message") == "No changes found"
+
+ status_msg = {"Status": "Failed", "Message": "No changes found"}
+ mocker.patch(MODULE_PATH +
+ 'dellemc_configure_idrac_eventing.run_idrac_eventing_config', return_value=status_msg)
+ result = self._run_module_with_fail_json(idrac_default_args)
assert result['failed'] is True
+ assert result['msg'] == "Failed to configure the iDRAC eventing settings"
+
+ def test_run_idrac_eventing_config_main_cases(self, idrac_connection_configure_eventing_mock,
+ idrac_file_manager_config_eventing_mock, idrac_default_args,
+ is_changes_applicable_eventing_mock, mocker):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "destination_number": 1,
+ "destination": None, "snmp_v3_username": None,
+ "snmp_trap_state": None, "alert_number": 4, "email_alert_state": None,
+ "address": None, "custom_message": None, "enable_alerts": "Enabled",
+ "authentication": "Enabled", "smtp_ip_address": "XX.XX.XX.XX",
+ "smtp_port": 443, "username": "uname", "password": "pwd"})
+
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
+ obj = MagicMock()
+ obj.IsValid = True
+ mocker.patch(
+ MODULE_PATH + "dellemc_configure_idrac_eventing.file_share_manager.create_share_obj", return_value=(obj))
+ message = {'Status': 'Success', 'Message': 'Message Success', "Data": {
+ 'Message': 'Status failed in checking data'}}
+ idrac_connection_configure_eventing_mock.config_mgr.set_liason_share.return_value = message
+ idrac_connection_configure_eventing_mock.config_mgr.is_change_applicable.return_value = {
+ "changes_applicable": False}
+ with pytest.raises(Exception) as exc:
+ self.module.run_idrac_eventing_config(
+ idrac_connection_configure_eventing_mock, f_module)
+ assert exc.value.args[0] == "No changes found to commit!"
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py
index 2606a0343..f2f40c390 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 6.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2020-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -14,13 +14,20 @@ __metaclass__ = type
import pytest
from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_configure_idrac_services
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from mock import MagicMock, Mock
from pytest import importorskip
+from ansible.module_utils._text import to_text
+import json
+from io import StringIO
importorskip("omsdk.sdkfile")
importorskip("omsdk.sdkcreds")
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
class TestConfigServices(FakeAnsibleModule):
module = dellemc_configure_idrac_services
@@ -242,13 +249,106 @@ class TestConfigServices(FakeAnsibleModule):
result = self._run_module_with_fail_json(idrac_default_args)
assert result['failed'] is True
- @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
- def test_main_idrac_configure_services_exception_handling_case(self, exc_type, mocker, idrac_default_args,
- idrac_connection_configure_services_mock,
- idrac_file_manager_config_services_mock):
- idrac_default_args.update({"share_name": None})
- mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
- 'dellemc_configure_idrac_services.run_idrac_services_config', side_effect=exc_type('test'))
- result = self._run_module_with_fail_json(idrac_default_args)
+ @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError, HTTPError, URLError, SSLValidationError, ConnectionError])
+ def test_main_dellemc_configure_idrac_services_handling_case(self, exc_type, mocker, idrac_default_args, idrac_connection_configure_services_mock,
+ idrac_file_manager_config_services_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "enable_web_server": "Enabled", "http_port": 443,
+ "https_port": 343, "timeout": 10, "ssl_encryption": "T_128_Bit_or_higher",
+ "tls_protocol": "TLS_1_1_and_Higher", "snmp_enable": "Enabled",
+ "community_name": "communityname", "snmp_protocol": "All", "alert_port": 445,
+ "discovery_port": 1000, "trap_format": "SNMPv1"})
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH +
+ 'dellemc_configure_idrac_services.run_idrac_services_config',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH +
+ 'dellemc_configure_idrac_services.run_idrac_services_config',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if exc_type != URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
assert 'msg' in result
- assert result['failed'] is True
+
+ def test_run_idrac_services_config_invalid_share(self, mocker, idrac_default_args, idrac_connection_configure_services_mock,
+ idrac_file_manager_config_services_mock):
+ f_module = self.get_module_mock(params=idrac_default_args)
+ obj = MagicMock()
+ obj.IsValid = False
+ mocker.patch(
+ MODULE_PATH + "dellemc_configure_idrac_services.file_share_manager.create_share_obj", return_value=(obj))
+ with pytest.raises(Exception) as exc:
+ self.module.run_idrac_services_config(idrac_connection_configure_services_mock, f_module)
+ assert exc.value.args[0] == "Unable to access the share. Ensure that the share name, share mount, and share credentials provided are correct."
+
+ def test_run_idrac_services_config_Error(self, mocker, idrac_default_args, idrac_connection_configure_services_mock,
+ idrac_file_manager_config_services_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "enable_web_server": "Enabled", "http_port": 443,
+ "https_port": 343, "timeout": 10, "ssl_encryption": "T_128_Bit_or_higher",
+ "tls_protocol": "TLS_1_1_and_Higher", "snmp_enable": "Enabled",
+ "community_name": "communityname", "snmp_protocol": "All", "alert_port": 445,
+ "discovery_port": 1000, "trap_format": "SNMPv1"})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ obj = MagicMock()
+ obj.IsValid = True
+ mocker.patch(
+ MODULE_PATH + "dellemc_configure_idrac_services.file_share_manager.create_share_obj", return_value=(obj))
+ message = {'Status': 'Failed', 'Message': 'Key Error Expected', "Data1": {
+ 'Message': 'Status failed in checking data'}}
+ idrac_connection_configure_services_mock.config_mgr.set_liason_share.return_value = message
+ idrac_connection_configure_services_mock.config_mgr.apply_changes.return_value = "Returned on Key Error"
+ with pytest.raises(Exception) as exc:
+ self.module.run_idrac_services_config(idrac_connection_configure_services_mock, f_module)
+ assert exc.value.args[0] == "Key Error Expected"
+
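+    # Covers the ipmi_lan branches: an empty dict and None both reach
+    # apply_changes, while check_mode with no applicable changes raises.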
+ def test_run_idrac_services_config_extra_coverage(self, mocker, idrac_default_args, idrac_connection_configure_services_mock,
+ idrac_file_manager_config_services_mock):
+ idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
+ "share_password": None, "enable_web_server": "Enabled", "http_port": 443,
+ "https_port": 343, "timeout": 10, "ssl_encryption": "T_128_Bit_or_higher",
+ "tls_protocol": "TLS_1_1_and_Higher", "snmp_enable": "Enabled",
+ "community_name": "communityname", "snmp_protocol": "All", "alert_port": 445,
+ "discovery_port": 1000, "trap_format": "SNMPv1", "ipmi_lan": {}})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ obj = MagicMock()
+ obj.IsValid = True
+ mocker.patch(
+ MODULE_PATH + "dellemc_configure_idrac_services.file_share_manager.create_share_obj", return_value=(obj))
+ message = {'Status': 'Success', "Data": {
+ 'Message': 'Status failed in checking data'}}
+ idrac_connection_configure_services_mock.config_mgr.set_liason_share.return_value = message
+ idrac_connection_configure_services_mock.config_mgr.apply_changes.return_value = "Returned on community name none"
+ ret_data = self.module.run_idrac_services_config(idrac_connection_configure_services_mock, f_module)
+ assert ret_data == "Returned on community name none"
+
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
+ idrac_connection_configure_services_mock.config_mgr.is_change_applicable.return_value = {
+ 'changes_applicable': False}
+ with pytest.raises(Exception) as exc:
+ self.module.run_idrac_services_config(idrac_connection_configure_services_mock, f_module)
+ assert exc.value.args[0] == "No changes found to commit!"
+
+ idrac_default_args.update({"ipmi_lan": None})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ ret_data = self.module.run_idrac_services_config(
+ idrac_connection_configure_services_mock, f_module)
+ assert ret_data == "Returned on community name none"
+
+ def test_run_idrac_services_config_success_case06(self, idrac_connection_configure_services_mock,
+ idrac_default_args, idrac_file_manager_config_services_mock, mocker):
+ status_msg = {"Status": "Success", "Message": "No changes found"}
+ mocker.patch(
+ MODULE_PATH + 'dellemc_configure_idrac_services.run_idrac_services_config', return_value=status_msg)
+ resp = self._run_module(idrac_default_args)
+ assert resp['changed'] is True
+ assert resp['msg'] == "Successfully configured the iDRAC services settings."
+ assert resp['service_status'].get('Status') == "Success"
+ assert resp['service_status'].get('Message') == "No changes found"
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_firmware_inventory.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_firmware_inventory.py
deleted file mode 100644
index 657f89e49..000000000
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_firmware_inventory.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# -*- coding: utf-8 -*-
-
-#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
-
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-#
-
-from __future__ import (absolute_import, division, print_function)
-
-__metaclass__ = type
-
-import pytest
-from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_get_firmware_inventory
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, PropertyMock
-from pytest import importorskip
-
-importorskip("omsdk.sdkfile")
-importorskip("omsdk.sdkcreds")
-
-
-class TestFirmware(FakeAnsibleModule):
- module = dellemc_get_firmware_inventory
-
- @pytest.fixture
- def idrac_firmware_mock(self, mocker):
- omsdk_mock = MagicMock()
- idrac_obj = MagicMock()
- omsdk_mock.update_mgr = idrac_obj
- type(idrac_obj).InstalledFirmware = PropertyMock(return_value="msg")
- return idrac_obj
-
- @pytest.fixture
- def idrac_get_firmware_inventory_connection_mock(self, mocker, idrac_firmware_mock):
- idrac_conn_class_mock = mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
- 'dellemc_get_firmware_inventory.iDRACConnection',
- return_value=idrac_firmware_mock)
- idrac_conn_class_mock.return_value.__enter__.return_value = idrac_firmware_mock
- return idrac_firmware_mock
-
- def test_main_idrac_get_firmware_inventory_success_case01(self, idrac_get_firmware_inventory_connection_mock,
- idrac_default_args):
- idrac_get_firmware_inventory_connection_mock.update_mgr.InstalledFirmware.return_value = {"Status": "Success"}
- result = self._run_module(idrac_default_args)
- assert result == {'ansible_facts': {
- idrac_get_firmware_inventory_connection_mock.ipaddr: {
- 'Firmware Inventory': idrac_get_firmware_inventory_connection_mock.update_mgr.InstalledFirmware}},
- "changed": False}
-
- def test_run_get_firmware_inventory_success_case01(self, idrac_get_firmware_inventory_connection_mock,
- idrac_default_args):
- obj2 = MagicMock()
- idrac_get_firmware_inventory_connection_mock.update_mgr = obj2
- type(obj2).InstalledFirmware = PropertyMock(return_value="msg")
- f_module = self.get_module_mock(params=idrac_default_args)
- msg, err = self.module.run_get_firmware_inventory(idrac_get_firmware_inventory_connection_mock, f_module)
- assert msg == {'failed': False,
- 'msg': idrac_get_firmware_inventory_connection_mock.update_mgr.InstalledFirmware}
- assert msg['failed'] is False
- assert err is False
-
- def test_run_get_firmware_inventory_failed_case01(self, idrac_get_firmware_inventory_connection_mock,
- idrac_default_args):
- f_module = self.get_module_mock(params=idrac_default_args)
- error_msg = "Error in Runtime"
- obj2 = MagicMock()
- idrac_get_firmware_inventory_connection_mock.update_mgr = obj2
- type(obj2).InstalledFirmware = PropertyMock(side_effect=Exception(error_msg))
- msg, err = self.module.run_get_firmware_inventory(idrac_get_firmware_inventory_connection_mock, f_module)
- assert msg['failed'] is True
- assert msg['msg'] == "Error: {0}".format(error_msg)
- assert err is True
-
- def test_run_get_firmware_inventory_failed_case02(self, idrac_get_firmware_inventory_connection_mock,
- idrac_default_args):
- message = {'Status': "Failed", "Message": "Fetched..."}
- obj2 = MagicMock()
- idrac_get_firmware_inventory_connection_mock.update_mgr = obj2
- type(obj2).InstalledFirmware = PropertyMock(return_value=message)
- f_module = self.get_module_mock(params=idrac_default_args)
- result = self.module.run_get_firmware_inventory(idrac_get_firmware_inventory_connection_mock, f_module)
- assert result == ({'msg': {'Status': 'Failed', 'Message': 'Fetched...'}, 'failed': True}, False)
- if "Status" in result[0]['msg']:
- if not result[0]['msg']['Status'] == "Success":
- assert result[0]['failed'] is True
-
- def test_main_idrac_get_firmware_inventory_faild_case01(self, idrac_get_firmware_inventory_connection_mock,
- idrac_default_args):
- error_msg = "Error occurs"
- obj2 = MagicMock()
- idrac_get_firmware_inventory_connection_mock.update_mgr = obj2
- type(obj2).InstalledFirmware = PropertyMock(side_effect=Exception(error_msg))
- result = self._run_module_with_fail_json(idrac_default_args)
- assert result['failed'] is True
- assert result['msg'] == "Error: {0}".format(error_msg)
-
- @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
- def test_main_idrac_get_firmware_inventory_exception_handling_case(self, exc_type, mocker,
- idrac_get_firmware_inventory_connection_mock,
- idrac_default_args):
- mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_get_firmware_inventory.'
- 'run_get_firmware_inventory', side_effect=exc_type('test'))
- result = self._run_module_with_fail_json(idrac_default_args)
- assert 'msg' in result
- assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_system_inventory.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_system_inventory.py
deleted file mode 100644
index c398c9f8a..000000000
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_get_system_inventory.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# -*- coding: utf-8 -*-
-
-#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
-
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-#
-
-from __future__ import (absolute_import, division, print_function)
-
-__metaclass__ = type
-
-import pytest
-from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_get_system_inventory
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, Mock
-from pytest import importorskip
-
-importorskip("omsdk.sdkfile")
-importorskip("omsdk.sdkcreds")
-
-
-class TestSystemInventory(FakeAnsibleModule):
- module = dellemc_get_system_inventory
-
- @pytest.fixture
- def idrac_system_inventory_mock(self, mocker):
- omsdk_mock = MagicMock()
- idrac_obj = MagicMock()
- omsdk_mock.get_entityjson = idrac_obj
- type(idrac_obj).get_json_device = Mock(return_value="msg")
- return idrac_obj
-
- @pytest.fixture
- def idrac_get_system_inventory_connection_mock(self, mocker, idrac_system_inventory_mock):
- idrac_conn_class_mock = mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.'
- 'dellemc_get_system_inventory.iDRACConnection',
- return_value=idrac_system_inventory_mock)
- idrac_conn_class_mock.return_value.__enter__.return_value = idrac_system_inventory_mock
- return idrac_system_inventory_mock
-
- def test_main_idrac_get_system_inventory_success_case01(self, idrac_get_system_inventory_connection_mock, mocker,
- idrac_default_args):
- mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_get_system_inventory.run_get_system_inventory',
- return_value=({"msg": "Success"}, False))
- msg = self._run_module(idrac_default_args)
- assert msg['changed'] is False
- assert msg['ansible_facts'] == {idrac_get_system_inventory_connection_mock.ipaddr:
- {'SystemInventory': "Success"}}
-
- def test_run_get_system_inventory_error_case(self, idrac_get_system_inventory_connection_mock, idrac_default_args,
- mocker):
- f_module = self.get_module_mock()
- idrac_get_system_inventory_connection_mock.get_json_device = {"msg": "Success"}
- result, err = self.module.run_get_system_inventory(idrac_get_system_inventory_connection_mock, f_module)
- assert result["failed"] is True
- assert err is True
-
- def test_main_error_case(self, idrac_get_system_inventory_connection_mock, idrac_default_args, mocker):
- mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_get_system_inventory.run_get_system_inventory',
- return_value=({"msg": "Failed"}, True))
- result = self._run_module_with_fail_json(idrac_default_args)
- assert result['failed'] is True
-
- @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
- def test_main_exception_handling_case(self, exc_type, mocker, idrac_default_args,
- idrac_get_system_inventory_connection_mock):
-
- mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_get_system_inventory.run_get_system_inventory',
- side_effect=exc_type('test'))
- result = self._run_module_with_fail_json(idrac_default_args)
- assert 'msg' in result
- assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py
index 1ae8b22c0..c1c3fd5d2 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 6.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2020-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -14,13 +14,20 @@ __metaclass__ = type
import pytest
from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_idrac_lc_attributes
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from mock import MagicMock, Mock
from pytest import importorskip
+from ansible.module_utils._text import to_text
+import json
+from io import StringIO
importorskip("omsdk.sdkfile")
importorskip("omsdk.sdkcreds")
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
class TestLcAttributes(FakeAnsibleModule):
module = dellemc_idrac_lc_attributes
@@ -58,7 +65,8 @@ class TestLcAttributes(FakeAnsibleModule):
idrac_default_args, mocker, idrac_file_manager_lc_attribute_mock):
idrac_default_args.update({"share_name": None, 'share_password': None,
'csior': 'Enabled', 'share_mnt': None, 'share_user': None})
- message = {'changed': False, 'msg': {'Status': "Success", "message": "No changes found to commit!"}}
+ message = {'changed': False, 'msg': {
+ 'Status': "Success", "message": "No changes found to commit!"}}
mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_lc_attributes.run_setup_idrac_csior',
return_value=message)
with pytest.raises(Exception) as ex:
@@ -69,7 +77,8 @@ class TestLcAttributes(FakeAnsibleModule):
return_value=status_msg)
result = self._run_module(idrac_default_args)
assert result["msg"] == "Successfully configured the iDRAC LC attributes."
- status_msg = {"Status": "Success", "Message": "No changes were applied"}
+ status_msg = {"Status": "Success",
+ "Message": "No changes were applied"}
mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_lc_attributes.run_setup_idrac_csior',
return_value=status_msg)
result = self._run_module(idrac_default_args)
@@ -79,17 +88,23 @@ class TestLcAttributes(FakeAnsibleModule):
idrac_file_manager_lc_attribute_mock):
idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
"share_password": None, "csior": "csior"})
- message = {"changes_applicable": True, "message": "changes are applicable"}
+ message = {"changes_applicable": True,
+ "message": "changes are applicable"}
idrac_connection_lc_attribute_mock.config_mgr.is_change_applicable.return_value = message
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
with pytest.raises(Exception) as ex:
- self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ self.module.run_setup_idrac_csior(
+ idrac_connection_lc_attribute_mock, f_module)
assert ex.value.args[0] == "Changes found to commit!"
- status_msg = {"changes_applicable": False, "message": "no changes are applicable"}
+ status_msg = {"changes_applicable": False,
+ "message": "no changes are applicable"}
idrac_connection_lc_attribute_mock.config_mgr.is_change_applicable.return_value = status_msg
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
with pytest.raises(Exception) as ex:
- self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ self.module.run_setup_idrac_csior(
+ idrac_connection_lc_attribute_mock, f_module)
assert ex.value.args[0] == "No changes found to commit!"
def test_run_setup_idrac_csior_success_case02(self, idrac_connection_lc_attribute_mock, idrac_default_args,
@@ -101,7 +116,8 @@ class TestLcAttributes(FakeAnsibleModule):
idrac_connection_lc_attribute_mock.config_mgr.apply_changes.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = False
- msg = self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ msg = self.module.run_setup_idrac_csior(
+ idrac_connection_lc_attribute_mock, f_module)
assert msg == {'changes_applicable': True, 'message': 'changes found to commit!',
'changed': True, 'Status': 'Success'}
@@ -114,7 +130,8 @@ class TestLcAttributes(FakeAnsibleModule):
idrac_connection_lc_attribute_mock.config_mgr.apply_changes.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = False
- msg = self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ msg = self.module.run_setup_idrac_csior(
+ idrac_connection_lc_attribute_mock, f_module)
assert msg == {'changes_applicable': True, 'Message': 'No changes found to commit!',
'changed': False, 'Status': 'Success'}
@@ -127,9 +144,11 @@ class TestLcAttributes(FakeAnsibleModule):
idrac_connection_lc_attribute_mock.config_mgr = obj
type(obj).disable_csior = Mock(return_value=message)
idrac_connection_lc_attribute_mock.config_mgr.is_change_applicable.return_value = message
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
with pytest.raises(Exception) as ex:
- self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ self.module.run_setup_idrac_csior(
+ idrac_connection_lc_attribute_mock, f_module)
assert ex.value.args[0] == "Changes found to commit!"
def test_run_setup_csior_enable_case(self, idrac_connection_lc_attribute_mock, idrac_default_args,
@@ -141,21 +160,25 @@ class TestLcAttributes(FakeAnsibleModule):
idrac_connection_lc_attribute_mock.config_mgr = obj
type(obj).enable_csior = Mock(return_value='Enabled')
idrac_connection_lc_attribute_mock.config_mgr.is_change_applicable.return_value = message
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
with pytest.raises(Exception) as ex:
- self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ self.module.run_setup_idrac_csior(
+ idrac_connection_lc_attribute_mock, f_module)
assert ex.value.args[0] == "Changes found to commit!"
def test_run_setup_csior_failed_case01(self, idrac_connection_lc_attribute_mock, idrac_default_args,
idrac_file_manager_lc_attribute_mock):
idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
"share_password": None, "csior": "csior"})
- message = {'Status': 'Failed', "Data": {'Message': 'status failed in checking Data'}}
+ message = {'Status': 'Failed', "Data": {
+ 'Message': 'status failed in checking Data'}}
idrac_connection_lc_attribute_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
idrac_connection_lc_attribute_mock.config_mgr.set_liason_share.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
with pytest.raises(Exception) as ex:
- self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ self.module.run_setup_idrac_csior(
+ idrac_connection_lc_attribute_mock, f_module)
assert ex.value.args[0] == "status failed in checking Data"
def test_run_setup_idrac_csior_failed_case03(self, idrac_connection_lc_attribute_mock, idrac_default_args,
@@ -167,19 +190,64 @@ class TestLcAttributes(FakeAnsibleModule):
idrac_connection_lc_attribute_mock.config_mgr.apply_changes.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = False
- msg = self.module.run_setup_idrac_csior(idrac_connection_lc_attribute_mock, f_module)
+ msg = self.module.run_setup_idrac_csior(
+ idrac_connection_lc_attribute_mock, f_module)
assert msg == {'changes_applicable': False, 'Message': 'Failed to found changes',
'changed': False, 'Status': 'Failed', "failed": True}
assert msg['changed'] is False
assert msg['failed'] is True
- @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
+ @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError, HTTPError, URLError, SSLValidationError, ConnectionError])
def test_main_lc_attribute_exception_handling_case(self, exc_type, mocker, idrac_connection_lc_attribute_mock,
idrac_default_args, idrac_file_manager_lc_attribute_mock):
idrac_default_args.update({"share_name": None, 'share_password': None,
'csior': 'Enabled', 'share_mnt': None, 'share_user': None})
- mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_idrac_lc_attributes.run_setup_idrac_csior',
- side_effect=exc_type('test'))
- result = self._run_module_with_fail_json(idrac_default_args)
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH +
+ 'dellemc_idrac_lc_attributes.run_setup_idrac_csior',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH +
+ 'dellemc_idrac_lc_attributes.run_setup_idrac_csior',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if exc_type != URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
assert 'msg' in result
- assert result['failed'] is True
+
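+    # An invalid file share (IsValid is False) should abort run_setup_idrac_csior
+    # with the standard share-access error message.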
+ def test_run_setup_idrac_csior_invalid_share(self, idrac_connection_lc_attribute_mock, idrac_default_args,
+ idrac_file_manager_lc_attribute_mock, mocker):
+ idrac_default_args.update({"share_name": None, 'share_password': None,
+ 'csior': 'Enabled', 'share_mnt': None, 'share_user': None})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ obj = MagicMock()
+ obj.IsValid = False
+ mocker.patch(
+ MODULE_PATH + "dellemc_idrac_lc_attributes.file_share_manager.create_share_obj", return_value=(obj))
+ with pytest.raises(Exception) as exc:
+ self.module.run_setup_idrac_csior(
+ idrac_connection_lc_attribute_mock, f_module)
+ assert exc.value.args[0] == "Unable to access the share. Ensure that the share name, share mount, and share credentials provided are correct."
+
+ @pytest.mark.parametrize("exc_type", [KeyError])
+ def test_run_setup_idrac_csior_Error(self, exc_type, idrac_connection_lc_attribute_mock, idrac_default_args,
+ idrac_file_manager_lc_attribute_mock, mocker):
+ idrac_default_args.update({"share_name": None, 'share_password': None,
+ 'csior': 'Enabled', 'share_mnt': None, 'share_user': None})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ obj = MagicMock()
+ obj.IsValid = True
+ mocker.patch(
+ MODULE_PATH + "dellemc_idrac_lc_attributes.file_share_manager.create_share_obj", return_value=(obj))
+ message = {'Status': 'Failed', 'Message': 'Key Error Expected', "Data1": {
+ 'Message': 'Status failed in checking data'}}
+ idrac_connection_lc_attribute_mock.config_mgr.set_liason_share.return_value = message
+ idrac_connection_lc_attribute_mock.config_mgr.apply_changes.return_value = "Returned on Key Error"
+ with pytest.raises(Exception) as exc:
+ self.module.run_setup_idrac_csior(
+ idrac_connection_lc_attribute_mock, f_module)
+ assert exc.value.args[0] == "Key Error Expected"
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py
index c3a0dff19..c95fccf01 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -15,8 +15,8 @@ __metaclass__ = type
import pytest
import os
from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_idrac_storage_volume
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock, Mock
from pytest import importorskip
importorskip("omsdk.sdkfile")
@@ -221,7 +221,7 @@ class TestStorageVolume(FakeAnsibleModule):
mocker):
idrac_default_args.update({"share_name": "sharename", "state": "create", "controller_id": "XYZ123",
"capacity": 1.4, "stripe_size": 1, "volumes": [{"drives": {"id": ["data"],
- "location":[1]}}]})
+ "location": [1]}}]})
with pytest.raises(ValueError) as ex:
self.module._validate_options(idrac_default_args)
assert "Either {0} or {1} is allowed".format("id", "location") == str(ex.value)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py
index 768c62bfc..5ee3c9201 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 6.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2020-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -14,13 +14,20 @@ __metaclass__ = type
import pytest
from ansible_collections.dellemc.openmanage.plugins.modules import dellemc_system_lockdown_mode
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, Mock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from mock import MagicMock
from pytest import importorskip
+from ansible.module_utils._text import to_text
+import json
+from io import StringIO
importorskip("omsdk.sdkfile")
importorskip("omsdk.sdkcreds")
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
class TestSysytemLockdownMode(FakeAnsibleModule):
module = dellemc_system_lockdown_mode
@@ -77,17 +84,27 @@ class TestSysytemLockdownMode(FakeAnsibleModule):
self._run_module_with_fail_json(idrac_default_args)
assert ex.value.args[0]['msg'] == "Failed to complete the lockdown mode operations."
- @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError])
+ @pytest.mark.parametrize("exc_type", [ImportError, ValueError, RuntimeError, HTTPError, URLError, SSLValidationError, ConnectionError])
def test_main_exception_handling_case(self, exc_type, mocker, idrac_connection_system_lockdown_mode_mock,
idrac_file_manager_system_lockdown_mock, idrac_default_args):
idrac_default_args.update({"share_name": None, "share_password": None,
"lockdown_mode": "Enabled"})
- idrac_connection_system_lockdown_mode_mock.config_mgr.set_liason_share.return_value = {"Status": "Failed"}
- mocker.patch('ansible_collections.dellemc.openmanage.plugins.modules.dellemc_system_lockdown_mode.run_system_lockdown_mode',
- side_effect=exc_type('test'))
- result = self._run_module_with_fail_json(idrac_default_args)
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH +
+ 'dellemc_system_lockdown_mode.run_system_lockdown_mode',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH +
+ 'dellemc_system_lockdown_mode.run_system_lockdown_mode',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if exc_type != URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
assert 'msg' in result
- assert result['failed'] is True
def test_run_system_lockdown_mode_success_case01(self, idrac_connection_system_lockdown_mode_mock, mocker,
idrac_file_manager_system_lockdown_mock, idrac_default_args):
@@ -124,3 +141,31 @@ class TestSysytemLockdownMode(FakeAnsibleModule):
with pytest.raises(Exception) as ex:
self.module.run_system_lockdown_mode(idrac_connection_system_lockdown_mode_mock, f_module)
assert ex.value.args[0] == "message inside data"
+
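+    # Covers the non-'Enabled' lockdown paths and then verifies that an invalid
+    # share object surfaces the standard share-access error message.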
+ def test_run_system_lockdown_mode_invalid_share(self, idrac_connection_system_lockdown_mode_mock, mocker,
+ idrac_file_manager_system_lockdown_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": None, "share_password": None,
+ "lockdown_mode": "EnabledDisabled", "share_mnt": None, "share_user": None})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ obj = MagicMock()
+ obj.IsValid = False
+
+ mocker.patch(
+ MODULE_PATH + "dellemc_system_lockdown_mode.tempfile.gettempdir", return_value=(obj))
+ message = {"Message": "message inside data"}
+ idrac_connection_system_lockdown_mode_mock.config_mgr.disable_system_lockdown.return_value = message
+ msg = self.module.run_system_lockdown_mode(idrac_connection_system_lockdown_mode_mock, f_module)
+ assert msg == {'changed': False, 'failed': False, 'msg': "Successfully completed the lockdown mode operations."}
+
+ idrac_default_args.update({"lockdown_mode": "Disabled"})
+ message = {"Message": "message inside data"}
+ idrac_connection_system_lockdown_mode_mock.config_mgr.disable_system_lockdown.return_value = message
+ msg = self.module.run_system_lockdown_mode(idrac_connection_system_lockdown_mode_mock, f_module)
+ assert msg['system_lockdown_status']['Message'] == "message inside data"
+
+ mocker.patch(
+ MODULE_PATH + "dellemc_system_lockdown_mode.file_share_manager.create_share_obj", return_value=(obj))
+ with pytest.raises(Exception) as exc:
+ self.module.run_system_lockdown_mode(
+ idrac_connection_system_lockdown_mode_mock, f_module)
+ assert exc.value.args[0] == "Unable to access the share. Ensure that the share name, share mount, and share credentials provided are correct."
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_attributes.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_attributes.py
index d5c225230..42bb58f62 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_attributes.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_attributes.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 6.0.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.1.0
+# Copyright (C) 2022-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -13,14 +13,11 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
-import os
-import tempfile
from io import StringIO
import pytest
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
-from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_attributes
from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from mock import MagicMock
@@ -35,31 +32,20 @@ IDRAC_URI = "/redfish/v1/Managers/{res_id}/Oem/Dell/DellAttributes/{attr_id}"
MANAGERS_URI = "/redfish/v1/Managers"
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.idrac_attributes.'
UTILS_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.utils.'
+SNMP_ADDRESS = "SNMP.1.IPAddress"
@pytest.fixture
-def idrac_redfish_mock_for_attr(mocker, ome_response_mock):
+def idrac_redfish_mock_for_attr(mocker, redfish_response_mock):
connection_class_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI')
- ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
- ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
- return ome_connection_mock_obj
+ idrac_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ idrac_connection_mock_obj.invoke_request.return_value = redfish_response_mock
+ return idrac_connection_mock_obj
class TestIdracAttributes(FakeAnsibleModule):
module = idrac_attributes
- @pytest.fixture
- def idrac_attributes_mock(self):
- idrac_obj = MagicMock()
- return idrac_obj
-
- @pytest.fixture
- def idrac_connection_attributes_mock(self, mocker, idrac_attributes_mock):
- idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
- return_value=idrac_attributes_mock)
- idrac_conn_mock.return_value.__enter__.return_value = idrac_attributes_mock
- return idrac_conn_mock
-
@pytest.mark.parametrize("params", [{"id": "iDRAC.Embedded.1", "attr": {'SNMP.1.AgentCommunity': 'Disabled'},
"uri_dict":
{"iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
@@ -72,55 +58,6 @@ class TestIdracAttributes(FakeAnsibleModule):
diff, response_attr = self.module.get_response_attr(idrac_redfish_mock_for_attr, params["id"], params["attr"], params["uri_dict"])
assert response_attr.keys() == params["response_attr"].keys()
- @pytest.mark.parametrize("params", [{"res_id": "iDRAC.Embedded.1", "attr": {'SNMP.1.AgentCommunity': 'public'},
- "uri_dict": {
- "iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
- "System.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1",
- "LifecycleController.Embedded.1":
- "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1"},
- "response_attr": {"SNMP.1.AgentCommunity": "public"},
- "mparams": {'idrac_attributes': {"SNMP.1.AgentCommunity": "public"}
- }
- }])
- def _test_fetch_idrac_uri_attr(self, params, idrac_redfish_mock_for_attr, idrac_default_args):
- idrac_default_args.update(params.get('mparams'))
- f_module = self.get_module_mock(params=idrac_default_args)
- diff, uri_dict, idrac_response_attr, system_response_attr, lc_response_attr =\
- self.module.fetch_idrac_uri_attr(idrac_redfish_mock_for_attr, f_module, params["res_id"])
- assert idrac_response_attr.keys() == params["response_attr"].keys()
-
- @pytest.mark.parametrize("params", [{"res_id": "iDRAC.Embedded.1", "attr": {'SNMP.1.AgentCommunity': 'Disabled'},
- "uri_dict": {
- "iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
- "System.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1",
- "LifecycleController.Embedded.1":
- "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1"},
- "response_attr": {"ThermalSettings.1.ThermalProfile": "Sound Cap"},
- "mparams": {'system_attributes': {"ThermalSettings.1.ThermalProfile": "Sound Cap"}
- }}])
- def _test_fetch_idrac_uri_attr_succes_case01(self, params, idrac_redfish_mock_for_attr, idrac_default_args):
- idrac_default_args.update(params.get('mparams'))
- f_module = self.get_module_mock(params=idrac_default_args)
- diff, uri_dict, idrac_response_attr, system_response_attr, lc_response_attr = self.module.fetch_idrac_uri_attr(
- idrac_redfish_mock_for_attr, f_module, params["res_id"])
- assert system_response_attr.keys() == params["response_attr"].keys()
-
- @pytest.mark.parametrize("params", [{"res_id": "iDRAC.Embedded.1", "attr": {'SNMP.1.AgentCommunity': 'Disabled'},
- "uri_dict": {
- "iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
- "System.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1",
- "LifecycleController.Embedded.1":
- "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1"},
- "response_attr": {"LCAttributes.1.AutoUpdate": "Enabled"},
- "mparams": {'lifecycle_controller_attributes': {"LCAttributes.1.AutoUpdate": "Enabled"}
- }}])
- def _test_fetch_idrac_uri_attr_succes_case02(self, params, idrac_redfish_mock_for_attr, idrac_default_args):
- idrac_default_args.update(params.get('mparams'))
- f_module = self.get_module_mock(params=idrac_default_args)
- diff, uri_dict, idrac_response_attr, system_response_attr, lc_response_attr = self.module.fetch_idrac_uri_attr(
- idrac_redfish_mock_for_attr, f_module, params["res_id"])
- assert lc_response_attr.keys() == params["response_attr"].keys()
-
@pytest.mark.parametrize("params", [{"res_id": "iDRAC.Embedded.1", "attr": {'SNMP.1.AgentCommunity': 'Disabled'},
"uri_dict": {
"iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
@@ -257,51 +194,217 @@ class TestIdracAttributes(FakeAnsibleModule):
params["lc_response_attr"])
assert resp.keys() == params["resp"].keys()
- @pytest.mark.parametrize("params",
- [{"json_data": {},
- "diff": 1,
- "uri_dict": {
- "iDRAC.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1",
- "System.Embedded.1": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1",
- "LifecycleController.Embedded.1":
- "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1"},
- "system_response_attr": {"ThermalSettings.1.ThermalProfile": "Sound Cap"},
- "mparams": {'system_attributes': {"ThermalSettings.1.ThermalProfile": "Sound Cap"}},
- "idrac_response_attr": {},
- "lc_response_attr": {},
- "message": "Successfully updated the attributes."
- }])
- def _test_idrac_attributes(self, params, idrac_connection_attributes_mock, idrac_default_args, mocker):
- idrac_connection_attributes_mock.success = params.get("success", True)
- idrac_connection_attributes_mock.json_data = params.get('json_data')
- idrac_default_args.update(params.get('mparams'))
- f_module = self.get_module_mock(params=idrac_default_args)
- mocker.patch(UTILS_PATH + 'get_manager_res_id', return_value=MANAGER_ID)
- mocker.patch(MODULE_PATH + 'fetch_idrac_uri_attr', return_value=(params["diff"],
- params["uri_dict"],
- params["idrac_response_attr"],
- params["system_response_attr"],
- params["lc_response_attr"]))
- mocker.patch(MODULE_PATH + 'update_idrac_attributes', return_value=params["resp"])
- result = self._run_module(idrac_default_args, check_mode=params.get('check_mode', False))
- assert result['msg'] == params['message']
-
- @pytest.mark.parametrize("exc_type", [HTTPError, URLError])
- def _test_main_idrac_attributes_exception_handling_case(self, exc_type, idrac_connection_attributes_mock, idrac_default_args, mocker):
+ @pytest.mark.parametrize("exc_type", [HTTPError, URLError, IOError, ValueError, TypeError, ConnectionError,
+ AttributeError, IndexError, KeyError])
+ def test_main_idrac_attributes_exception_handling_case(self, exc_type, idrac_redfish_mock_for_attr,
+ idrac_default_args, mocker):
idrac_default_args.update({'lifecycle_controller_attributes': {"LCAttributes.1.AutoUpdate": "Enabled"}})
json_str = to_text(json.dumps({"data": "out"}))
if exc_type not in [HTTPError]:
- mocker.patch(
- MODULE_PATH + 'update_idrac_attributes',
- side_effect=exc_type('test'))
+ mocker.patch(MODULE_PATH + 'update_idrac_attributes', side_effect=exc_type('test'))
else:
- mocker.patch(
- MODULE_PATH + 'update_idrac_attributes',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
- {"accept-type": "application/json"}, StringIO(json_str)))
- if not exc_type == URLError:
- result = self._run_module_with_fail_json(idrac_default_args)
+ mocker.patch(MODULE_PATH + 'update_idrac_attributes',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if exc_type != URLError:
+ result = self._run_module(idrac_default_args)
assert result['failed'] is True
else:
result = self._run_module(idrac_default_args)
assert 'msg' in result
+
+ def test_xml_data_conversion(self, idrac_redfish_mock_for_attr, idrac_default_args):
+ attribute = {"Time.1.Timezone": "CST6CDT", "SNMP.1.SNMPProtocol": "All",
+ "LCAttributes.1.AutoUpdate": "Disabled"}
+ result = self.module.xml_data_conversion(attribute, "System.Embedded.1")
+ assert isinstance(result[0], str)
+ assert isinstance(result[1], dict)
+
+ def test_validate_attr_name(self, idrac_redfish_mock_for_attr, idrac_default_args):
+ attribute = [{"Name": "Time.1.Timezone", "Value": "CST6CDT"}, {"Name": "SNMP.1.SNMPProtocol", "Value": "All"},
+ {"Name": "LCAttributes.1.AutoUpdate", "Value": "Disabled"}]
+ req_data = {"Time.1.Timezone": "CST6CDT", "SNMP.1.SNMPProtocol": "All",
+ "LCAttributes.1.AutoUpdate": "Disabled"}
+ result = self.module.validate_attr_name(attribute, req_data)
+ assert result[0] == {'Time.1.Timezone': 'CST6CDT', 'SNMP.1.SNMPProtocol': 'All',
+ 'LCAttributes.1.AutoUpdate': 'Disabled'}
+ assert result[1] == {}
+ req_data = {"Time.2.Timezone": "CST6CDT", "SNMP.2.SNMPProtocol": "All"}
+ result = self.module.validate_attr_name(attribute, req_data)
+ assert result[0] == {}
+ assert result[1] == {'Time.2.Timezone': 'Attribute does not exist.',
+ 'SNMP.2.SNMPProtocol': 'Attribute does not exist.'}
+
+ def test_process_check_mode(self, idrac_redfish_mock_for_attr, idrac_default_args):
+ idrac_default_args.update({'lifecycle_controller_attributes': {"LCAttributes.1.AutoUpdate": "Enabled"}})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as exc:
+ self.module.process_check_mode(f_module, False)
+ assert exc.value.args[0] == "No changes found to be applied."
+ f_module.check_mode = True
+ with pytest.raises(Exception) as exc:
+ self.module.process_check_mode(f_module, True)
+ assert exc.value.args[0] == "Changes found to be applied."
+
+ def test_scp_idrac_attributes(self, idrac_redfish_mock_for_attr, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({'lifecycle_controller_attributes': {"LCAttributes.1.AutoUpdate": "Enabled"}})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + 'get_check_mode', return_value=None)
+ mocker.patch(MODULE_PATH + 'xml_data_conversion', return_value=("<components></components>",
+ {"LCAttributes.1.AutoUpdate": "Enabled"}))
+ idrac_redfish_mock_for_attr.wait_for_job_completion.return_value = {"JobStatus": "Success"}
+ result = self.module.scp_idrac_attributes(f_module, idrac_redfish_mock_for_attr, "LC.Embedded.1")
+ assert result["JobStatus"] == "Success"
+ idrac_default_args.update({'idrac_attributes': {"User.1.UserName": "username"}})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + 'xml_data_conversion', return_value=("<components></components>",
+ {"User.1.UserName": "username"}))
+ idrac_redfish_mock_for_attr.wait_for_job_completion.return_value = {"JobStatus": "Success"}
+ result = self.module.scp_idrac_attributes(f_module, idrac_redfish_mock_for_attr, MANAGER_ID)
+ assert result["JobStatus"] == "Success"
+ idrac_default_args.update({'system_attributes': {SNMP_ADDRESS: "XX.XX.XX.XX"}})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + 'xml_data_conversion', return_value=("<components></components>",
+ {SNMP_ADDRESS: "XX.XX.XX.XX"}))
+ idrac_redfish_mock_for_attr.wait_for_job_completion.return_value = {"JobStatus": "Success"}
+ result = self.module.scp_idrac_attributes(f_module, idrac_redfish_mock_for_attr, "System.Embedded.1")
+ assert result["JobStatus"] == "Success"
+
+ def test_get_check_mode(self, idrac_redfish_mock_for_attr, redfish_response_mock, idrac_default_args, mocker):
+ idrac_json = {SNMP_ADDRESS: "XX.XX.XX.XX"}
+ idrac_default_args.update({'idrac_attributes': idrac_json})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ response_obj = MagicMock()
+ idrac_redfish_mock_for_attr.export_scp.return_value = response_obj
+ response_obj.json_data = {
+ "SystemConfiguration": {"Components": [
+ {"FQDD": MANAGER_ID, "Attributes": {"Name": SNMP_ADDRESS, "Value": "XX.XX.XX.XX"}}
+ ]}}
+ mocker.patch(MODULE_PATH + 'validate_attr_name', return_value=(
+ idrac_json, {"SNMP.10.IPAddress": "Attribute does not exists."}))
+ with pytest.raises(Exception) as exc:
+ self.module.get_check_mode(f_module, idrac_redfish_mock_for_attr, idrac_json, {}, {})
+ assert exc.value.args[0] == "Attributes have invalid values."
+ system_json = {"System.1.Attr": "Value"}
+ idrac_default_args.update({'system_attributes': system_json})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ response_obj.json_data = {
+ "SystemConfiguration": {"Components": [
+ {"FQDD": "System.Embedded.1", "Attributes": {"Name": "System.1.Attr", "Value": "Value"}}
+ ]}}
+ mocker.patch(MODULE_PATH + 'validate_attr_name', return_value=(
+ system_json, {"System.10.Attr": "Attribute does not exists."}))
+ with pytest.raises(Exception) as exc:
+ self.module.get_check_mode(f_module, idrac_redfish_mock_for_attr, {}, system_json, {})
+ assert exc.value.args[0] == "Attributes have invalid values."
+ lc_json = {"LCAttributes.1.AutoUpdate": "Enabled"}
+ idrac_default_args.update({'lifecycle_controller_attributes': lc_json})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ response_obj.json_data = {
+ "SystemConfiguration": {"Components": [
+ {"FQDD": "LifecycleController.Embedded.1", "Attributes": {"Name": "LCAttributes.1.AutoUpdate",
+ "Value": "Enabled"}}
+ ]}}
+ mocker.patch(MODULE_PATH + 'validate_attr_name', return_value=(
+ lc_json, {"LCAttributes.10.AutoUpdate": "Attribute does not exists."}))
+ with pytest.raises(Exception) as exc:
+ self.module.get_check_mode(f_module, idrac_redfish_mock_for_attr, {}, {}, lc_json)
+ assert exc.value.args[0] == "Attributes have invalid values."
+ lc_json = {"LCAttributes.1.AutoUpdate": "Enabled"}
+ idrac_default_args.update({'lifecycle_controller_attributes': lc_json})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = True
+ mocker.patch(MODULE_PATH + 'validate_attr_name', return_value=(lc_json, None))
+ with pytest.raises(Exception) as exc:
+ self.module.get_check_mode(f_module, idrac_redfish_mock_for_attr, {}, {}, lc_json)
+ assert exc.value.args[0] == "No changes found to be applied."
+ mocker.patch(MODULE_PATH + 'validate_attr_name', return_value=({"LCAttributes.1.AutoUpdate": "Disabled"}, None))
+ with pytest.raises(Exception) as exc:
+ self.module.get_check_mode(f_module, idrac_redfish_mock_for_attr, {}, {}, lc_json)
+ assert exc.value.args[0] == "Changes found to be applied."
+
+ def test_fetch_idrac_uri_attr(self, idrac_redfish_mock_for_attr, redfish_response_mock, idrac_default_args, mocker):
+ idrac_json = {SNMP_ADDRESS: "XX.XX.XX.XX"}
+ idrac_default_args.update({'idrac_attributes': idrac_json})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ response_obj = MagicMock()
+ idrac_redfish_mock_for_attr.invoke_request.return_value = response_obj
+ response_obj.json_data = {"Links": {"Oem": {"Dell": {"DellAttributes": {}}}},
+ "Message": "None", "MessageId": "SYS069"}
+ response_obj.status_code = 200
+ mocker.patch(MODULE_PATH + "scp_idrac_attributes", return_value=response_obj)
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_idrac_uri_attr(idrac_redfish_mock_for_attr, f_module, MANAGER_ID)
+ assert exc.value.args[0] == "No changes found to be applied."
+ response_obj.json_data = {"Links": {"Oem": {"Dell": {"DellAttributes": {}}}},
+ "Message": "None", "MessageId": "SYS053"}
+ mocker.patch(MODULE_PATH + "scp_idrac_attributes", return_value=response_obj)
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_idrac_uri_attr(idrac_redfish_mock_for_attr, f_module, MANAGER_ID)
+ assert exc.value.args[0] == "Successfully updated the attributes."
+ response_obj.json_data = {"Links": {"Oem": {"Dell": {"DellAttributes": {}}}},
+ "Message": "Unable to complete application of configuration profile values.",
+ "MessageId": "SYS080"}
+ mocker.patch(MODULE_PATH + "scp_idrac_attributes", return_value=response_obj)
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_idrac_uri_attr(idrac_redfish_mock_for_attr, f_module, MANAGER_ID)
+ assert exc.value.args[0] == "Application of some of the attributes failed due to invalid value or enumeration."
+
+ response_obj.json_data = {"Links": {"Oem": {"Dell": {"DellAttributes": {}}}},
+ "Message": "Unable to complete the task.", "MessageId": "SYS080"}
+ mocker.patch(MODULE_PATH + "scp_idrac_attributes", return_value=response_obj)
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_idrac_uri_attr(idrac_redfish_mock_for_attr, f_module, MANAGER_ID)
+ assert exc.value.args[0] == "Unable to complete the task."
+
+ def test_main_success(self, idrac_redfish_mock_for_attr, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"resource_id": "System.Embedded.1", "idrac_attributes": {"Attr": "Value"}})
+ mocker.patch(MODULE_PATH + "fetch_idrac_uri_attr", return_value=(None, None, None, None, None))
+ mocker.patch(MODULE_PATH + "process_check_mode", return_value=None)
+ mocker.patch(MODULE_PATH + "update_idrac_attributes", return_value=None)
+ result = self._run_module(idrac_default_args)
+ assert result["changed"]
+ assert result["msg"] == "Successfully updated the attributes."
+
+ def test_validate_vs_registry(self, idrac_redfish_mock_for_attr, redfish_response_mock, idrac_default_args):
+ idrac_default_args.update({"resource_id": "System.Embedded.1", "idrac_attributes": {"Attr": "Value"}})
+ attr_dict = {"attr": "value", "attr1": "value1", "attr2": 3}
+ registry = {"attr": {"Readonly": True},
+ "attr1": {"Type": "Enumeration", "Value": [{"ValueDisplayName": "Attr"}]},
+ "attr2": {"Type": "Integer", "LowerBound": 1, "UpperBound": 2}}
+ result = self.module.validate_vs_registry(registry, attr_dict)
+ assert result["attr"] == "Read only Attribute cannot be modified."
+ assert result["attr1"] == "Invalid value for Enumeration."
+ assert result["attr2"] == "Integer out of valid range."
+
+ def test_fetch_idrac_uri_attr_dell_attr(self, idrac_redfish_mock_for_attr, redfish_response_mock,
+ idrac_default_args, mocker):
+ idrac_default_args.update({"resource_id": "System.Embedded.1", "idrac_attributes": {"Attr": "Value"}})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + "get_response_attr", return_value=(1, None))
+ mocker.patch(MODULE_PATH + "validate_vs_registry", return_value={"Attr": "Attribute does not exists"})
+ response_obj = MagicMock()
+ idrac_redfish_mock_for_attr.invoke_request.return_value = response_obj
+ response_obj.json_data = {"Links": {"Oem": {"Dell": {
+ "DellAttributes": [
+ {"@odata.id": "/api/services/"}
+ ]
+ }}}}
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_idrac_uri_attr(idrac_redfish_mock_for_attr, f_module, "System.Embedded.1")
+ assert exc.value.args[0] == "Attributes have invalid values."
+
+ idrac_default_args.update({"resource_id": "System.Embedded.1", "system_attributes": {"Attr": "Value"}})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + "get_response_attr", return_value=(1, None))
+ mocker.patch(MODULE_PATH + "validate_vs_registry", return_value={"Attr": "Attribute does not exists"})
+ response_obj = MagicMock()
+ idrac_redfish_mock_for_attr.invoke_request.return_value = response_obj
+ response_obj.json_data = {"Links": {"Oem": {"Dell": {
+ "DellAttributes": [
+ {"@odata.id": "/api/services/"}
+ ]
+ }}}}
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_idrac_uri_attr(idrac_redfish_mock_for_attr, f_module, "System.Embedded.1")
+ assert exc.value.args[0] == "Attributes have invalid values."
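
The widened exception-handling test above injects errors through mocker.patch side_effect; HTTPError needs the full five-argument constructor with a file-like body so the handler can json.load() it, while the other exception types take only a message. A small runnable sketch of the same pattern, with a hypothetical handle_request standing in for the module's main flow:

import json
import pytest
from io import StringIO
from unittest.mock import MagicMock
from urllib.error import HTTPError, URLError

def handle_request(client):
    # Illustrative stand-in: translate transport errors into Ansible-style result dicts.
    try:
        return {"failed": False, "msg": client.invoke_request()}
    except HTTPError as err:            # must precede URLError (its subclass)
        return {"failed": True, "msg": json.load(err)}
    except URLError as err:
        return {"failed": False, "unreachable": True, "msg": str(err.reason)}

@pytest.mark.parametrize("exc_type", [HTTPError, URLError, ValueError])
def test_handle_request_exceptions(exc_type):
    client = MagicMock()
    if exc_type is HTTPError:
        body = StringIO(json.dumps({"error": "details"}))
        client.invoke_request.side_effect = HTTPError(
            "https://testhost.com", 400, "http error message",
            {"accept-type": "application/json"}, body)
    else:
        client.invoke_request.side_effect = exc_type("test")
    if exc_type is ValueError:           # unhandled types propagate to the caller
        with pytest.raises(ValueError):
            handle_request(client)
    elif exc_type is URLError:
        assert handle_request(client)["unreachable"] is True
    else:
        assert handle_request(client)["failed"] is True
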
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_bios.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_bios.py
index 3ea74c90a..edbb5b4ea 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_bios.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_bios.py
@@ -402,7 +402,7 @@ class TestConfigBios(FakeAnsibleModule):
else:
mocker.patch(
MODULE_PATH + 'run_server_bios_config',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
if not exc_type == URLError:
result = self._run_module_with_fail_json(idrac_default_args)
@@ -585,3 +585,10 @@ class TestConfigBios(FakeAnsibleModule):
]
result = self.module.check_params(params.get('each'), fields)
assert result == params.get('message')
+
+ def test_validate_negative_job_time_out(self, idrac_default_args):
+ idrac_default_args.update({"job_wait": True, "job_wait_timeout": -5})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.validate_negative_job_time_out(f_module)
+ assert ex.value.args[0] == "The parameter job_wait_timeout value cannot be negative or zero."
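
The new timeout test above only verifies that a non-positive job_wait_timeout is rejected with a fixed message. A compact sketch of the same check, using a hypothetical validate function that raises instead of calling AnsibleModule.fail_json:

import pytest

def validate_negative_job_time_out(params):
    # Reject non-positive wait times before any iDRAC request is made.
    if params.get("job_wait") and params.get("job_wait_timeout", 0) <= 0:
        raise ValueError("The parameter job_wait_timeout value cannot be negative or zero.")

def test_validate_negative_job_time_out():
    with pytest.raises(ValueError) as ex:
        validate_negative_job_time_out({"job_wait": True, "job_wait_timeout": -5})
    assert "cannot be negative or zero" in str(ex.value)
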
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py
index 2e754888f..d5f43360f 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 6.1.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2022-23 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -15,13 +15,12 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_boot
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock
-from mock import PropertyMock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from io import StringIO
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from mock import MagicMock
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
@@ -47,9 +46,14 @@ class TestConfigBios(FakeAnsibleModule):
"BootSourceOverrideEnabled": "Disabled", "BootSourceOverrideMode": "Legacy",
"BootSourceOverrideTarget": "None", "UefiTargetBootSourceOverride": None,
"BootSourceOverrideTarget@Redfish.AllowableValues": []},
- "Actions": {"#ComputerSystem.Reset": {"ResetType@Redfish.AllowableValues": ["GracefulShutdown"]}}}
+ "Actions": {"#ComputerSystem.Reset": {"ResetType@Redfish.AllowableValues": ["ForceRestart"]}}}
result = self.module.get_response_attributes(f_module, boot_connection_mock, "System.Embedded.1")
assert result["BootSourceOverrideEnabled"] == "Disabled"
+
+ redfish_response_mock.json_data.pop("Actions")
+ result = self.module.get_response_attributes(f_module, boot_connection_mock, "System.Embedded.1")
+ assert result["BootSourceOverrideEnabled"] == "Disabled"
+
redfish_response_mock.json_data["Boot"].pop("BootOptions", None)
with pytest.raises(Exception) as err:
self.module.get_response_attributes(f_module, boot_connection_mock, "System.Embedded.1")
@@ -74,7 +78,7 @@ class TestConfigBios(FakeAnsibleModule):
def test_system_reset(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
mocker.patch(MODULE_PATH + 'idrac_boot.idrac_system_reset', return_value=(True, False, "Completed", {}))
- idrac_default_args.update({"boot_source_override_mode": "uefi", "reset_type": "graceful_restart"})
+ idrac_default_args.update({"boot_source_override_mode": "uefi", "reset_type": "force_restart"})
f_module = self.get_module_mock(params=idrac_default_args)
reset, track_failed, reset_msg, resp_data = self.module.system_reset(f_module, boot_connection_mock,
"System.Embedded.1")
@@ -90,9 +94,30 @@ class TestConfigBios(FakeAnsibleModule):
status, job = self.module.get_scheduled_job(boot_connection_mock)
assert status is True
+ def test_get_scheduled_job_job_state_not_none(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ mocker.patch(MODULE_PATH + 'idrac_boot.time', return_value=None)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"Members": []}
+ is_job, progress_job = self.module.get_scheduled_job(boot_connection_mock, ["Scheduled", "New", "Running"])
+ print(progress_job)
+ assert is_job is False
+
+ def test_get_scheduled_job_progress_job_none(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ mocker.patch(MODULE_PATH + 'idrac_boot.time', return_value=None)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"Members": [{
+ "Description": "Job Instance", "EndTime": "TIME_NA", "Id": "JID_609237056489", "JobState": "Completed",
+ "JobType": "BIOSConfiguration", "Message": "Job scheduled successfully.", "MessageArgs": [],
+ "MessageId": "PR19", "Name": "Configure: BIOS.Setup.1-1", "PercentComplete": 10}]}
+ status, job = self.module.get_scheduled_job(boot_connection_mock)
+ assert status is False
+
def test_configure_boot_options(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
idrac_default_args.update({"boot_source_override_mode": "uefi", "job_wait": True, "reset_type": "none",
"job_wait_timeout": 900})
+ obj = MagicMock()
+ obj.json_data = {"JobState": "Reset Successful"}
+
f_module = self.get_module_mock(params=idrac_default_args)
mocker.patch(MODULE_PATH + 'idrac_boot.get_scheduled_job', return_value=(True, {}))
resp_data = {"BootOrder": ["Boot001", "Boot002", "Boot003"], "BootSourceOverrideEnabled": "Disabled",
@@ -119,7 +144,7 @@ class TestConfigBios(FakeAnsibleModule):
"BootSourceOverrideMode": "UEFI", "BootSourceOverrideTarget": "UefiTarget",
"UefiTargetBootSourceOverride": "/0x31/0x33/0x01/0x01"}
mocker.patch(MODULE_PATH + 'idrac_boot.get_response_attributes', return_value=resp_data)
- idrac_default_args.update({"boot_source_override_mode": "legacy"})
+ idrac_default_args.update({"boot_source_override_mode": "legacy", "reset_type": "force_restart"})
f_module = self.get_module_mock(params=idrac_default_args)
redfish_response_mock.json_data = {"Attributes": {"UefiBootSeq": [
{"Name": "Boot001", "Id": 0, "Enabled": True}, {"Name": "Boot000", "Id": 1, "Enabled": True}]}}
@@ -127,6 +152,40 @@ class TestConfigBios(FakeAnsibleModule):
self.module.configure_boot_options(f_module, boot_connection_mock, "System.Embedded.1", {"Boot001": False})
assert err.value.args[0] == "This job is not complete after 900 seconds."
+ mocker.patch(MODULE_PATH + 'idrac_boot.system_reset', return_value=(False, False, "Completed Reset", None))
+ with pytest.raises(Exception) as err:
+ self.module.configure_boot_options(f_module, boot_connection_mock, "System.Embedded.1", {"Boot001": False})
+ assert err.value.args[0] == "Completed Reset"
+
+ redfish_response_mock.status_code = 200
+ redfish_response_mock.success = True
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_scheduled_job', return_value=(False, {}))
+ job_data = self.module.configure_boot_options(f_module, boot_connection_mock, "System.Embedded.1", {"Boot001": False})
+ assert job_data == {}
+
+ def test_configure_boot_options_v2(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"boot_source_override_mode": "uefi", "job_wait": True, "reset_type": "none",
+ "job_wait_timeout": 900})
+ obj = MagicMock()
+ obj.json_data = {"JobState": "Reset Successful"}
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_scheduled_job', return_value=(True, {}))
+ resp_data = {"BootOrder": ["Boot001", "Boot002", "Boot003"], "BootSourceOverrideEnabled": "Disabled",
+ "BootSourceOverrideMode": "Legacy", "BootSourceOverrideTarget": "UefiTarget",
+ "UefiTargetBootSourceOverride": "/0x31/0x33/0x01/0x01"}
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_response_attributes', return_value=resp_data)
+ redfish_response_mock.status_code = 202
+ redfish_response_mock.success = True
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/JID_123456789"}
+ redfish_response_mock.json_data = {"Attributes": {"BootSeq": [{"Name": "Boot001", "Id": 0, "Enabled": True},
+ {"Name": "Boot000", "Id": 1, "Enabled": True}]}}
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_scheduled_job', return_value=(False, {}))
+ mocker.patch(MODULE_PATH + 'idrac_boot.system_reset', return_value=(True, False, "Completed", obj))
+ mocker.patch(MODULE_PATH + 'idrac_boot.wait_for_idrac_job_completion',
+ return_value=(obj, ""))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ job_data = self.module.configure_boot_options(f_module, boot_connection_mock, "System.Embedded.1", {"Boot001": False})
+ assert job_data == {"JobState": "Reset Successful"}
+
def test_apply_boot_settings(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
idrac_default_args.update({"boot_source_override_mode": "uefi", "job_wait": True, "reset_type": "none",
"job_wait_timeout": 900})
@@ -142,6 +201,32 @@ class TestConfigBios(FakeAnsibleModule):
self.module.apply_boot_settings(f_module, boot_connection_mock, payload, "System.Embedded.1")
assert err.value.args[0] == "This job is not complete after 900 seconds."
+ redfish_response_mock.status_code = 400
+ job_data = self.module.apply_boot_settings(f_module, boot_connection_mock, payload, "System.Embedded.1")
+ assert job_data == {}
+
+ def test_apply_boot_settings_reset_type(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"boot_source_override_mode": "uefi", "job_wait": True, "reset_type": "graceful_restart",
+ "job_wait_timeout": 900})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ payload = {"Boot": {"BootSourceOverrideMode": "UEFI"}}
+ redfish_response_mock.success = True
+ redfish_response_mock.status_code = 200
+
+ obj = MagicMock()
+ obj.json_data = {"JobState": "Reset Successful"}
+ mocker.patch(MODULE_PATH + 'idrac_boot.system_reset', return_value=(False, False, "Completed", obj))
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_scheduled_job', return_value=(False, [{"Id": "JID_123456789"}]))
+ job_data = self.module.apply_boot_settings(f_module, boot_connection_mock, payload, "System.Embedded.1")
+ assert job_data == {"JobState": "Reset Successful"}
+
+ mocker.patch(MODULE_PATH + 'idrac_boot.system_reset', return_value=(True, False, "Completed", {}))
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_scheduled_job', return_value=(True, [{"Id": "JID_123456789"}]))
+ mocker.patch(MODULE_PATH + 'idrac_boot.wait_for_idrac_job_completion',
+ return_value=(obj, ""))
+ job_data = self.module.apply_boot_settings(f_module, boot_connection_mock, payload, "System.Embedded.1")
+ assert job_data == {"JobState": "Reset Successful"}
+
def test_configure_boot_settings(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
idrac_default_args.update({"boot_order": ["Boot005", "Boot001"], "job_wait": True, "reset_type": "none",
"job_wait_timeout": 900, "boot_source_override_mode": "uefi",
@@ -170,6 +255,37 @@ class TestConfigBios(FakeAnsibleModule):
self.module.configure_boot_settings(f_module, boot_connection_mock, "System.Embedded.1")
assert err.value.args[0] == "Changes found to be applied."
+ def test_configure_boot_settings_v2(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"boot_order": ["Boot001", "Boot002", "Boot003"], "job_wait": True, "reset_type": "none",
+ "job_wait_timeout": 900, "boot_source_override_mode": "uefi",
+ "boot_source_override_enabled": "once", "boot_source_override_target": "cd",
+ "uefi_target_boot_source_override": "test_uefi_path"})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ resp_data = {"BootSourceOverrideEnabled": "Disabled",
+ "BootSourceOverrideMode": "Legacy", "BootSourceOverrideTarget": "UefiTarget",
+ "UefiTargetBootSourceOverride": "/0x31/0x33/0x01/0x01", "BootOrder": ["Boot001", "Boot002", "Boot003"]}
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_response_attributes', return_value=resp_data)
+ mocker.patch(MODULE_PATH + 'idrac_boot.apply_boot_settings', return_value={"JobStatus": "Completed"})
+
+ job_resp = self.module.configure_boot_settings(f_module, boot_connection_mock, "System.Embedded.1")
+ assert job_resp["JobStatus"] == "Completed"
+
+ idrac_default_args.update({"boot_order": []})
+ with pytest.raises(Exception) as err:
+ self.module.configure_boot_settings(f_module, boot_connection_mock, "System.Embedded.1")
+ assert err.value.args[0] == "Unable to complete the operation because all boot devices are required for this operation."
+
+ idrac_default_args.pop("boot_order")
+ idrac_default_args.pop("boot_source_override_mode")
+ idrac_default_args.pop("boot_source_override_enabled")
+ job_resp = self.module.configure_boot_settings(f_module, boot_connection_mock, "System.Embedded.1")
+ assert job_resp["JobStatus"] == "Completed"
+
+ idrac_default_args.update({"boot_source_override_target": "uefi_target"})
+ resp_data.update({"BootSourceOverrideTarget": "cd"})
+ job_resp = self.module.configure_boot_settings(f_module, boot_connection_mock, "System.Embedded.1")
+ assert job_resp["JobStatus"] == "Completed"
+
def test_configure_idrac_boot(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
idrac_default_args.update({"job_wait": True, "reset_type": "none", "job_wait_timeout": 900,
"boot_options": [{"boot_option_reference": "HardDisk.List.1-1", "enabled": True}]})
@@ -208,7 +324,12 @@ class TestConfigBios(FakeAnsibleModule):
self.module.configure_idrac_boot(f_module, boot_connection_mock, "System.Embedded.1")
assert err.value.args[0] == "Changes found to be applied."
- @pytest.mark.parametrize("exc_type", [RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError,
+ f_module = self.get_module_mock(params=idrac_default_args)
+ idrac_default_args.pop("boot_options")
+ job_resp = self.module.configure_idrac_boot(f_module, boot_connection_mock, "System.Embedded.1")
+ assert job_resp == {"JobType": "Completed"}
+
+ @pytest.mark.parametrize("exc_type", [HTTPError, RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError,
ImportError, ValueError, TypeError])
def test_main_exception(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker, exc_type):
idrac_default_args.update({"boot_source_override_mode": "legacy"})
@@ -217,9 +338,9 @@ class TestConfigBios(FakeAnsibleModule):
mocker.patch(MODULE_PATH + 'idrac_boot.get_system_res_id', side_effect=exc_type('test'))
else:
mocker.patch(MODULE_PATH + 'idrac_boot.get_system_res_id',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 401, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
- if not exc_type == URLError:
+ if exc_type != URLError:
result = self._run_module_with_fail_json(idrac_default_args)
assert result['failed'] is True
else:
@@ -254,3 +375,32 @@ class TestConfigBios(FakeAnsibleModule):
with pytest.raises(Exception) as err:
self._run_module(idrac_default_args)
assert err.value.args[0]["msg"] == "Failed"
+
+    def test_main_success_v2(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"boot_source_override_mode": "legacy", "resource_id": "System.Embedded.1"})
+ redfish_response_mock.success = True
+ job_resp = {"Description": "Job Instance", "EndTime": "TIME_NA", "Id": "JID_609237056489",
+ "JobState": "Failed", "JobType": "BIOSConfiguration", "MessageId": "PR19",
+ "Message": "Job scheduled successfully.", "MessageArgs": [],
+ "Name": "Configure: BIOS.Setup.1-1", "PercentComplete": 100}
+ mocker.patch(MODULE_PATH + 'idrac_boot.configure_idrac_boot', return_value=job_resp)
+ boot_return_data = {"Members": [{"BootOptionEnabled": False, "BootOptionReference": "HardDisk.List.1-1",
+ "Description": "Current settings of the Legacy Boot option",
+ "DisplayName": "Hard drive C:", "Id": "HardDisk.List.1-1",
+ "Name": "Legacy Boot option", "UefiDevicePath": "VenHw(D6C0639F-823DE6)"}],
+ "Name": "Boot Options Collection", "Description": "Collection of BootOptions"}
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_existing_boot_options', return_value=boot_return_data)
+ resp_data = {"BootOrder": ["Boot001", "Boot002", "Boot003"], "BootSourceOverrideEnabled": "Disabled",
+ "BootSourceOverrideMode": "Legacy", "BootSourceOverrideTarget": "UefiTarget",
+ "UefiTargetBootSourceOverride": "/0x31/0x33/0x01/0x01"}
+ mocker.patch(MODULE_PATH + 'idrac_boot.get_response_attributes', return_value=resp_data)
+ mocker.patch(MODULE_PATH + 'idrac_boot.strip_substr_dict', return_value=job_resp)
+ with pytest.raises(Exception) as err:
+ self._run_module(idrac_default_args)
+ assert err.value.args[0]["msg"] == "Failed to update the boot settings."
+
+ idrac_default_args.update({"job_wait": False, "reset_type": "none"})
+ job_resp.update({"JobState": "Running"})
+ # with pytest.raises(Exception) as err:
+ module_return = self._run_module(idrac_default_args)
+ assert module_return["msg"] == "The boot settings job is triggered successfully."
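
The idrac_boot tests above repeatedly patch module-level helpers (system_reset, get_scheduled_job, wait_for_idrac_job_completion) so that configure_boot_options and apply_boot_settings follow a chosen branch. A minimal sketch of that patching pattern, assuming the pytest-mock plugin supplies the mocker fixture; the helpers and apply_boot_settings below are illustrative stand-ins, not the module's real signatures:

def system_reset(client):
    # Hypothetical helper: pretend the host reset succeeded.
    return True

def wait_for_job(client):
    # Hypothetical helper: pretend the configuration job finished.
    return {"JobState": "Completed"}

def apply_boot_settings(client, payload):
    # Mirrors the branch structure exercised above: poll the job only after a successful reset.
    if not system_reset(client):
        return {}
    return wait_for_job(client)

def test_apply_uses_patched_helpers(mocker):
    mocker.patch(__name__ + ".system_reset", return_value=True)
    mocker.patch(__name__ + ".wait_for_job", return_value={"JobState": "Reset Successful"})
    assert apply_boot_settings(object(), {})["JobState"] == "Reset Successful"

def test_apply_skips_wait_when_reset_fails(mocker):
    mocker.patch(__name__ + ".system_reset", return_value=False)
    assert apply_boot_settings(object(), {}) == {}
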
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py
index c5ee0dc8f..5e94faf91 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.5.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.6.0
+# Copyright (C) 2022-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -25,39 +25,50 @@ from ansible_collections.dellemc.openmanage.plugins.modules import idrac_certifi
from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from mock import MagicMock
-NOT_SUPPORTED_ACTION = "Certificate {op} not supported for the specified certificate type {certype}."
-SUCCESS_MSG = "Successfully performed the '{command}' operation."
+IMPORT_SSL_CERTIFICATE = "#DelliDRACCardService.ImportSSLCertificate"
+EXPORT_SSL_CERTIFICATE = "#DelliDRACCardService.ExportSSLCertificate"
+IDRAC_CARD_SERVICE_ACTION_URI = "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions"
+IDRAC_CARD_SERVICE_ACTION_URI_RES_ID = "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService/Actions"
+
+NOT_SUPPORTED_ACTION = "Certificate '{operation}' not supported for the specified certificate type '{cert_type}'."
+SUCCESS_MSG = "Successfully performed the '{command}' certificate operation."
+SUCCESS_MSG_SSL = "Successfully performed the SSL key upload and '{command}' certificate operation."
NO_CHANGES_MSG = "No changes found to be applied."
CHANGES_MSG = "Changes found to be applied."
-NO_RESET = " Reset iDRAC to apply new certificate. Until iDRAC is reset, the old certificate will be active."
+WAIT_NEGATIVE_OR_ZERO_MSG = "The value for the `wait` parameter cannot be negative or zero."
+SSL_KEY_MSG = "Unable to locate the SSL key file at {ssl_key}."
+SSK_KEY_NOT_SUPPORTED = "Upload of SSL key not supported"
+NO_RESET = "Reset iDRAC to apply the new certificate. Until the iDRAC is reset, the old certificate will remain active."
RESET_UNTRACK = " iDRAC reset is in progress. Until the iDRAC is reset, the changes would not apply."
-RESET_SUCCESS = " iDRAC has been reset successfully."
+RESET_SUCCESS = "iDRAC has been reset successfully."
RESET_FAIL = " Unable to reset the iDRAC. For changes to reflect, manually reset the iDRAC."
SYSTEM_ID = "System.Embedded.1"
MANAGER_ID = "iDRAC.Embedded.1"
SYSTEMS_URI = "/redfish/v1/Systems"
MANAGERS_URI = "/redfish/v1/Managers"
-IDRAC_SERVICE = "/redfish/v1/Dell/Managers/{res_id}/DelliDRACCardService"
+IDRAC_SERVICE = "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService"
CSR_SSL = "/redfish/v1/CertificateService/Actions/CertificateService.GenerateCSR"
-IMPORT_SSL = "/redfish/v1/Dell/Managers/{res_id}/DelliDRACCardService/Actions/DelliDRACCardService.ImportSSLCertificate"
-EXPORT_SSL = "/redfish/v1/Dell/Managers/{res_id}/DelliDRACCardService/Actions/DelliDRACCardService.ExportSSLCertificate"
-RESET_SSL = "/redfish/v1/Dell/Managers/{res_id}/DelliDRACCardService/Actions/DelliDRACCardService.SSLResetCfg"
+IMPORT_SSL = f"{IDRAC_CARD_SERVICE_ACTION_URI}/DelliDRACCardService.ImportSSLCertificate"
+UPLOAD_SSL = f"{IDRAC_CARD_SERVICE_ACTION_URI}/DelliDRACCardService.UploadSSLKey"
+EXPORT_SSL = f"{IDRAC_CARD_SERVICE_ACTION_URI}/DelliDRACCardService.ExportSSLCertificate"
+RESET_SSL = f"{IDRAC_CARD_SERVICE_ACTION_URI}/DelliDRACCardService.SSLResetCfg"
IDRAC_RESET = "/redfish/v1/Managers/{res_id}/Actions/Manager.Reset"
idrac_service_actions = {
- "#DelliDRACCardService.DeleteCertificate": "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.DeleteCertificate",
- "#DelliDRACCardService.ExportCertificate": "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.ExportCertificate",
- "#DelliDRACCardService.ExportSSLCertificate": EXPORT_SSL,
+ "#DelliDRACCardService.DeleteCertificate": f"{IDRAC_CARD_SERVICE_ACTION_URI}/DelliDRACCardService.DeleteCertificate",
+ "#DelliDRACCardService.ExportCertificate": f"{IDRAC_CARD_SERVICE_ACTION_URI}/DelliDRACCardService.ExportCertificate",
+ EXPORT_SSL_CERTIFICATE: EXPORT_SSL,
"#DelliDRACCardService.FactoryIdentityCertificateGenerateCSR":
- "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.FactoryIdentityCertificateGenerateCSR",
+ f"{IDRAC_CARD_SERVICE_ACTION_URI}/DelliDRACCardService.FactoryIdentityCertificateGenerateCSR",
"#DelliDRACCardService.FactoryIdentityExportCertificate":
- "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.FactoryIdentityExportCertificate",
+ f"{IDRAC_CARD_SERVICE_ACTION_URI}/DelliDRACCardService.FactoryIdentityExportCertificate",
"#DelliDRACCardService.FactoryIdentityImportCertificate":
- "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.FactoryIdentityImportCertificate",
- "#DelliDRACCardService.GenerateSEKMCSR": "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.GenerateSEKMCSR",
- "#DelliDRACCardService.ImportCertificate": "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.ImportCertificate",
- "#DelliDRACCardService.ImportSSLCertificate": IMPORT_SSL,
- "#DelliDRACCardService.SSLResetCfg": "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.SSLResetCfg",
- "#DelliDRACCardService.iDRACReset": "/redfish/v1/Managers/{res_id}/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.iDRACReset"
+ f"{IDRAC_CARD_SERVICE_ACTION_URI}/DelliDRACCardService.FactoryIdentityImportCertificate",
+ "#DelliDRACCardService.GenerateSEKMCSR": f"{IDRAC_CARD_SERVICE_ACTION_URI}/DelliDRACCardService.GenerateSEKMCSR",
+ "#DelliDRACCardService.ImportCertificate": f"{IDRAC_CARD_SERVICE_ACTION_URI}/DelliDRACCardService.ImportCertificate",
+ IMPORT_SSL_CERTIFICATE: IMPORT_SSL,
+ "#DelliDRACCardService.UploadSSLKey": UPLOAD_SSL,
+ "#DelliDRACCardService.SSLResetCfg": f"{IDRAC_CARD_SERVICE_ACTION_URI}/DelliDRACCardService.SSLResetCfg",
+ "#DelliDRACCardService.iDRACReset": f"{IDRAC_CARD_SERVICE_ACTION_URI}/DelliDRACCardService.iDRACReset"
}
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.idrac_certificates.'
@@ -79,7 +90,8 @@ class TestIdracCertificates(FakeAnsibleModule):
return idrac_obj
@pytest.fixture
- def idrac_connection_certificates_mock(self, mocker, idrac_certificates_mock):
+ def idrac_connection_certificates_mock(
+ self, mocker, idrac_certificates_mock):
idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
return_value=idrac_certificates_mock)
idrac_conn_mock.return_value.__enter__.return_value = idrac_certificates_mock
@@ -99,9 +111,15 @@ class TestIdracCertificates(FakeAnsibleModule):
{"json_data": {"CertificateFile": b'Hello world!'}, 'message': CHANGES_MSG, "success": True,
"reset_idrac": (True, False, RESET_SUCCESS), 'check_mode': True,
'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem', 'reset': False}},
+ {"json_data": {"CertificateFile": b'Hello world!', "ssl_key": b'Hello world!'}, 'message': CHANGES_MSG, "success": True,
+ "reset_idrac": (True, False, RESET_SUCCESS), 'check_mode': True,
+ 'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem', "ssl_key": '.pem', 'reset': False}},
{"json_data": {}, 'message': "{0}{1}".format(SUCCESS_MSG.format(command="import"), NO_RESET), "success": True,
"reset_idrac": (True, False, RESET_SUCCESS),
'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem', 'reset': False}},
+ {"json_data": {}, 'message': "{0} {1}".format(SUCCESS_MSG_SSL.format(command="import"), NO_RESET), "success": True,
+ "reset_idrac": (True, False, RESET_SUCCESS),
+ 'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem', "ssl_key": '.pem', 'reset': False}},
{"json_data": {}, 'message': SUCCESS_MSG.format(command="generate_csr"),
"success": True,
"get_cert_url": "url", "reset_idrac": (True, False, RESET_SUCCESS),
@@ -117,7 +135,7 @@ class TestIdracCertificates(FakeAnsibleModule):
"subject_alt_name": [
"emc"
]}}},
- {"json_data": {}, 'message': NOT_SUPPORTED_ACTION.format(op="generate_csr", certype="CA"),
+ {"json_data": {}, 'message': NOT_SUPPORTED_ACTION.format(operation="generate_csr", cert_type="CA"),
"success": True,
"get_cert_url": "url", "reset_idrac": (True, False, RESET_SUCCESS),
'mparams': {'command': 'generate_csr', 'certificate_type': "CA", 'certificate_path': tempfile.gettempdir(),
@@ -141,49 +159,84 @@ class TestIdracCertificates(FakeAnsibleModule):
"success": True,
"get_cert_url": "url", "reset_idrac": (True, False, RESET_SUCCESS),
'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem'}},
+ {"json_data": {}, 'message': "{0} {1}".format(SUCCESS_MSG_SSL.format(command="import"), RESET_SUCCESS),
+ "success": True,
+ "get_cert_url": "url", "reset_idrac": (True, False, RESET_SUCCESS),
+ 'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem', 'ssl_key': '.pem'}},
{"json_data": {}, 'message': "{0}{1}".format(SUCCESS_MSG.format(command="import"), RESET_SUCCESS),
"success": True,
"reset_idrac": (True, False, RESET_SUCCESS),
'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem'}},
+ {"json_data": {}, 'message': "{0} {1}".format(SUCCESS_MSG_SSL.format(command="import"), RESET_SUCCESS),
+ "success": True,
+ "reset_idrac": (True, False, RESET_SUCCESS),
+ 'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem', "ssl_key": '.pem'}},
{"json_data": {}, 'message': SUCCESS_MSG.format(command="export"), "success": True, "get_cert_url": "url",
'mparams': {'command': 'export', 'certificate_type': "HTTPS", 'certificate_path': tempfile.gettempdir()}},
{"json_data": {}, 'message': "{0}{1}".format(SUCCESS_MSG.format(command="reset"), RESET_SUCCESS),
"success": True, "get_cert_url": "url", "reset_idrac": (True, False, RESET_SUCCESS),
'mparams': {'command': 'reset', 'certificate_type': "HTTPS"}
- }
+ },
+ {"json_data": {}, 'message': WAIT_NEGATIVE_OR_ZERO_MSG, "success": True,
+ 'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem', 'wait': -1}},
+ {"json_data": {}, 'message': WAIT_NEGATIVE_OR_ZERO_MSG, "success": True,
+ 'mparams': {'command': 'reset', 'certificate_type': "HTTPS", 'wait': 0}},
+ {"json_data": {}, 'message': f"{SSL_KEY_MSG.format(ssl_key='/invalid/path')}", "success": True,
+ 'mparams': {'command': 'import', 'certificate_type': "HTTPS", 'certificate_path': '.pem', 'ssl_key': '/invalid/path'}}
])
- def test_idrac_certificates(self, params, idrac_connection_certificates_mock, idrac_default_args, mocker):
- idrac_connection_certificates_mock.success = params.get("success", True)
+ def test_idrac_certificates(
+ self, params, idrac_connection_certificates_mock, idrac_default_args, mocker):
+ idrac_connection_certificates_mock.success = params.get(
+ "success", True)
idrac_connection_certificates_mock.json_data = params.get('json_data')
- if params.get('mparams').get('certificate_path') and params.get('mparams').get('command') == 'import':
+ if params.get('mparams').get('certificate_path') and params.get(
+ 'mparams').get('command') == 'import':
sfx = params.get('mparams').get('certificate_path')
temp = tempfile.NamedTemporaryFile(suffix=sfx, delete=False)
temp.write(b'Hello')
temp.close()
params.get('mparams')['certificate_path'] = temp.name
+ if params.get('mparams').get('ssl_key') == '.pem':
+ temp = tempfile.NamedTemporaryFile(suffix=sfx, delete=False)
+ temp.write(b'Hello')
+ temp.close()
+ params.get('mparams')['ssl_key'] = temp.name
mocker.patch(MODULE_PATH + 'get_res_id', return_value=MANAGER_ID)
- mocker.patch(MODULE_PATH + 'get_idrac_service', return_value=IDRAC_SERVICE.format(res_id=MANAGER_ID))
- mocker.patch(MODULE_PATH + 'get_actions_map', return_value=idrac_service_actions)
+ mocker.patch(
+ MODULE_PATH + 'get_idrac_service',
+ return_value=IDRAC_SERVICE.format(
+ res_id=MANAGER_ID))
+ mocker.patch(
+ MODULE_PATH + 'get_actions_map',
+ return_value=idrac_service_actions)
# mocker.patch(MODULE_PATH + 'get_cert_url', return_value=params.get('get_cert_url'))
# mocker.patch(MODULE_PATH + 'write_to_file', return_value=params.get('write_to_file'))
- mocker.patch(MODULE_PATH + 'reset_idrac', return_value=params.get('reset_idrac'))
+ mocker.patch(
+ MODULE_PATH + 'reset_idrac',
+ return_value=params.get('reset_idrac'))
idrac_default_args.update(params.get('mparams'))
- result = self._run_module(idrac_default_args, check_mode=params.get('check_mode', False))
+ result = self._run_module(
+ idrac_default_args,
+ check_mode=params.get(
+ 'check_mode',
+ False))
if params.get('mparams').get('command') == 'import' and params.get('mparams').get(
'certificate_path') and os.path.exists(temp.name):
os.remove(temp.name)
assert result['msg'] == params['message']
@pytest.mark.parametrize("params", [{"json_data": {"Members": [{"@odata.id": '/redfish/v1/Mangers/iDRAC.1'}]},
- "certype": 'Server', "res_id": "iDRAC.1"},
+ "cert_type": 'Server', "res_id": "iDRAC.1"},
{"json_data": {"Members": []},
- "certype": 'Server', "res_id": MANAGER_ID}
+ "cert_type": 'Server', "res_id": MANAGER_ID}
])
def test_res_id(
self, params, idrac_redfish_mock_for_certs, ome_response_mock):
ome_response_mock.success = params.get("success", True)
ome_response_mock.json_data = params["json_data"]
- res_id = self.module.get_res_id(idrac_redfish_mock_for_certs, params.get('certype'))
+ res_id = self.module.get_res_id(
+ idrac_redfish_mock_for_certs,
+ params.get('cert_type'))
assert res_id == params['res_id']
@pytest.mark.parametrize("params", [{"json_data": {
@@ -196,62 +249,97 @@ class TestIdracCertificates(FakeAnsibleModule):
"VirtualMedia": {
"@odata.id": "/redfish/v1/Managers/iDRAC.Embedded.1/VirtualMedia"}
},
- "idrac_srv": '/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService', "res_id": "iDRAC.1"},
- {"json_data": {"Members": []},
- "idrac_srv": '/redfish/v1/Dell/Managers/iDRAC.Embedded.1/DelliDRACCardService', "res_id": MANAGER_ID}
+ "idrac_srv": '/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService', "res_id": "iDRAC.1"}
])
def test_get_idrac_service(
self, params, idrac_redfish_mock_for_certs, ome_response_mock):
ome_response_mock.success = params.get("success", True)
ome_response_mock.json_data = params["json_data"]
- idrac_srv = self.module.get_idrac_service(idrac_redfish_mock_for_certs, params.get('res_id'))
+ idrac_srv = self.module.get_idrac_service(
+ idrac_redfish_mock_for_certs, params.get('res_id'))
assert idrac_srv == params['idrac_srv']
+ def test_write_to_file(self, idrac_default_args):
+ inv_dir = "invalid_temp_dir"
+ idrac_default_args.update({"certificate_path": inv_dir})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.write_to_file(f_module, {}, "dkey")
+ assert ex.value.args[0] == f"Provided directory path '{inv_dir}' is not valid."
+ temp_dir = tempfile.mkdtemp()
+ os.chmod(temp_dir, 0o000)
+ idrac_default_args.update({"certificate_path": temp_dir})
+ with pytest.raises(Exception) as ex:
+ self.module.write_to_file(f_module, {}, "dkey")
+ assert ex.value.args[0] == f"Provided directory path '{temp_dir}' is not writable. Please check if you have appropriate permissions."
+ os.removedirs(temp_dir)
+
+ def test_upload_ssl_key(self, idrac_default_args):
+ temp_ssl = tempfile.NamedTemporaryFile(delete=False)
+ temp_ssl.write(b'ssl_key')
+ temp_ssl.close()
+ f_module = self.get_module_mock(params=idrac_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.upload_ssl_key(f_module, {}, {}, temp_ssl.name, "res_id")
+ assert ex.value.args[0] == "Upload of SSL key not supported"
+ os.chmod(temp_ssl.name, 0o000)
+ with pytest.raises(Exception) as ex:
+ self.module.upload_ssl_key(f_module, {}, {}, temp_ssl.name, "res_id")
+ assert "Permission denied" in ex.value.args[0]
+ os.remove(temp_ssl.name)
+
@pytest.mark.parametrize("params", [{"json_data": {
"Actions": {
- "#DelliDRACCardService.ExportSSLCertificate": {
- "SSLCertType@Redfish.AllowableValues": ["CA", "CSC", "ClientTrustCertificate", "Server"],
+ EXPORT_SSL_CERTIFICATE: {
+ "SSLCertType@Redfish.AllowableValues": ["CA", "CSC", "CustomCertificate", "ClientTrustCertificate", "Server"],
"target":
- "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.ExportSSLCertificate"
+ f"{IDRAC_CARD_SERVICE_ACTION_URI_RES_ID}/DelliDRACCardService.ExportSSLCertificate"
},
- "#DelliDRACCardService.ImportSSLCertificate": {
- "CertificateType@Redfish.AllowableValues": ["CA", "CSC", "ClientTrustCertificate", "Server"],
+ IMPORT_SSL_CERTIFICATE: {
+ "CertificateType@Redfish.AllowableValues": ["CA", "CSC", "CustomCertificate", "ClientTrustCertificate", "Server"],
"target":
- "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.ImportSSLCertificate"
+ f"{IDRAC_CARD_SERVICE_ACTION_URI_RES_ID}/DelliDRACCardService.ImportSSLCertificate"
},
"#DelliDRACCardService.SSLResetCfg": {
- "target": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.SSLResetCfg"
+ "target": f"{IDRAC_CARD_SERVICE_ACTION_URI_RES_ID}/DelliDRACCardService.SSLResetCfg"
},
+ "#DelliDRACCardService.UploadSSLKey": {
+ "target": f"{IDRAC_CARD_SERVICE_ACTION_URI_RES_ID}/DelliDRACCardService.UploadSSLKey"}
},
},
"idrac_service_uri": '/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService',
"actions": {
- '#DelliDRACCardService.ExportSSLCertificate':
- '/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.ExportSSLCertificate',
- '#DelliDRACCardService.ImportSSLCertificate':
- '/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.ImportSSLCertificate',
+ EXPORT_SSL_CERTIFICATE:
+ f"{IDRAC_CARD_SERVICE_ACTION_URI_RES_ID}/DelliDRACCardService.ExportSSLCertificate",
+ IMPORT_SSL_CERTIFICATE:
+ f"{IDRAC_CARD_SERVICE_ACTION_URI_RES_ID}/DelliDRACCardService.ImportSSLCertificate",
'#DelliDRACCardService.SSLResetCfg':
- '/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService/Actions/DelliDRACCardService.SSLResetCfg'}},
+ f"{IDRAC_CARD_SERVICE_ACTION_URI_RES_ID}/DelliDRACCardService.SSLResetCfg",
+ '#DelliDRACCardService.UploadSSLKey':
+ f"{IDRAC_CARD_SERVICE_ACTION_URI_RES_ID}/DelliDRACCardService.UploadSSLKey"}},
{"json_data": {"Members": []},
- "idrac_service_uri": '/redfish/v1/Dell/Managers/iDRAC.Embedded.1/DelliDRACCardService',
+ "idrac_service_uri": '/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DelliDRACCardService',
"actions": idrac_service_actions}
])
def test_get_actions_map(
self, params, idrac_redfish_mock_for_certs, ome_response_mock):
ome_response_mock.success = params.get("success", True)
ome_response_mock.json_data = params["json_data"]
- actions = self.module.get_actions_map(idrac_redfish_mock_for_certs, params.get('idrac_service_uri'))
+ actions = self.module.get_actions_map(
+ idrac_redfish_mock_for_certs,
+ params.get('idrac_service_uri'))
assert actions == params['actions']
- @pytest.mark.parametrize("params", [{"actions": {}, "op": "generate_csr",
- "certype": 'Server', "res_id": "iDRAC.1",
+ @pytest.mark.parametrize("params", [{"actions": {}, "operation": "generate_csr",
+ "cert_type": 'Server', "res_id": "iDRAC.1",
"dynurl": "/redfish/v1/CertificateService/Actions/CertificateService.GenerateCSR"},
- {"actions": {}, "op": "import",
- "certype": 'Server', "res_id": "iDRAC.1",
- "dynurl": "/redfish/v1/Dell/Managers/iDRAC.1/DelliDRACCardService/Actions/DelliDRACCardService.ImportSSLCertificate"}
+ {"actions": {}, "operation": "import",
+ "cert_type": 'Server', "res_id": "iDRAC.1",
+ "dynurl": "/redfish/v1/Managers/iDRAC.1/Oem/Dell/DelliDRACCardService/Actions/"
+ "DelliDRACCardService.ImportSSLCertificate"}
])
def test_get_cert_url(self, params):
- dynurl = self.module.get_cert_url(params.get('actions'), params.get('op'), params.get('certype'),
+ dynurl = self.module.get_cert_url(params.get('actions'), params.get('operation'), params.get('cert_type'),
params.get('res_id'))
assert dynurl == params['dynurl']
@@ -269,6 +357,21 @@ class TestIdracCertificates(FakeAnsibleModule):
'Resolution': 'No response action is required.',
'Severity': 'Informational'}]},
"mparams": {'command': 'export', 'certificate_type': "HTTPS",
+ 'certificate_path': tempfile.gettempdir(), 'reset': False}
+ },
+ {"cert_data": {"CertificateFile": 'Hello world!',
+ "@Message.ExtendedInfo": [{
+ "Message": "Successfully exported SSL Certificate.",
+ "MessageId": "IDRAC.2.5.LC067",
+ "Resolution": "No response action is required.",
+ "Severity": "Informational"}
+ ]},
+ "result": {'@Message.ExtendedInfo': [
+ {'Message': 'Successfully exported SSL Certificate.',
+ 'MessageId': 'IDRAC.2.5.LC067',
+ 'Resolution': 'No response action is required.',
+ 'Severity': 'Informational'}]},
+ "mparams": {'command': 'generate_csr', 'certificate_type': "HTTPS",
'certificate_path': tempfile.gettempdir(), 'reset': False}}])
def test_format_output(self, params, idrac_default_args):
idrac_default_args.update(params.get('mparams'))
@@ -280,18 +383,20 @@ class TestIdracCertificates(FakeAnsibleModule):
@pytest.mark.parametrize("exc_type", [SSLValidationError, URLError, ValueError, TypeError,
ConnectionError, HTTPError, ImportError, RuntimeError])
- def test_main_exceptions(self, exc_type, idrac_connection_certificates_mock, idrac_default_args, mocker):
- idrac_default_args.update({"command": "export", "certificate_path": "mypath"})
+ def test_main_exceptions(
+ self, exc_type, idrac_connection_certificates_mock, idrac_default_args, mocker):
+ idrac_default_args.update(
+ {"command": "export", "certificate_path": "mypath"})
json_str = to_text(json.dumps({"data": "out"}))
if exc_type not in [HTTPError, SSLValidationError]:
mocker.patch(MODULE_PATH + "get_res_id",
side_effect=exc_type('test'))
else:
mocker.patch(MODULE_PATH + "get_res_id",
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
if not exc_type == URLError:
- result = self._run_module_with_fail_json(idrac_default_args)
+ result = self._run_module(idrac_default_args)
assert result['failed'] is True
else:
result = self._run_module(idrac_default_args)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware.py
index c30ce409e..6d9fdd51b 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.4.0
+# Copyright (C) 2020-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -15,26 +15,38 @@ __metaclass__ = type
import json
import pytest
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_firmware
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from mock import MagicMock, patch, Mock
+from mock import MagicMock, Mock
from io import StringIO
from ansible.module_utils._text import to_text
-from ansible.module_utils.six.moves.urllib.parse import urlparse, ParseResult
+from ansible.module_utils.six.moves.urllib.parse import ParseResult
from pytest import importorskip
importorskip("omsdk.sdkfile")
importorskip("omsdk.sdkcreds")
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+CATALOG = "Catalog.xml"
+DELL_SHARE = "https://downloads.dell.com"
+GET_JOBID = "idrac_firmware.get_jobid"
+CONVERT_XML_JSON = "idrac_firmware._convert_xmltojson"
+SUCCESS_MSG = "Successfully updated the firmware."
+UPDATE_URL = "idrac_firmware.update_firmware_url_redfish"
+WAIT_FOR_JOB = "idrac_firmware.wait_for_job_completion"
+TIME_SLEEP = "idrac_firmware.time.sleep"
+VALIDATE_CATALOG = "idrac_firmware._validate_catalog_file"
+SHARE_PWD = "share_pwd"
+USER_PWD = "user_pwd"
+TEST_HOST = "'https://testhost.com'"
class TestidracFirmware(FakeAnsibleModule):
module = idrac_firmware
@pytest.fixture
- def idrac_firmware_update_mock(self, mocker):
+ def idrac_firmware_update_mock(self):
omsdk_mock = MagicMock()
idrac_obj = MagicMock()
omsdk_mock.update_mgr = idrac_obj
@@ -123,351 +135,72 @@ class TestidracFirmware(FakeAnsibleModule):
idrac_conn_class_mock.return_value.__enter__.return_value = idrac_firmware_job_mock
return idrac_firmware_job_mock
- def test_main_idrac_firmware_success_case(self, idrac_connection_firmware_mock,
- idrac_connection_firmware_redfish_mock,
- idrac_default_args, mocker):
- idrac_default_args.update({"share_name": "sharename", "catalog_file_name": "Catalog.xml",
- "share_user": "sharename", "share_password": "sharepswd",
- "share_mnt": "sharmnt",
- "reboot": True, "job_wait": True
- })
- message = {"Status": "Success", "update_msg": "Successfully updated the firmware.",
- "update_status": "Success", 'changed': False, 'failed': False}
- idrac_connection_firmware_redfish_mock.success = True
- idrac_connection_firmware_redfish_mock.json_data = {}
- mocker.patch(MODULE_PATH + 'idrac_firmware.update_firmware_redfish', return_value=message)
- result = self._run_module(idrac_default_args)
- assert result == {'msg': 'Successfully updated the firmware.', 'update_status': 'Success',
- 'changed': False, 'failed': False}
-
- @pytest.mark.parametrize("exc_type", [RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError,
- ImportError, ValueError, TypeError])
- def test_main_idrac_firmware_exception_handling_case(self, exc_type, mocker, idrac_default_args,
- idrac_connection_firmware_redfish_mock,
- idrac_connection_firmware_mock):
- idrac_default_args.update({"share_name": "sharename", "catalog_file_name": "Catalog.xml",
- "share_user": "sharename", "share_password": "sharepswd",
- "share_mnt": "sharmnt",
- "reboot": True, "job_wait": True
- })
- idrac_connection_firmware_redfish_mock.success = True
- idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
- mocker.patch(MODULE_PATH +
- 'idrac_firmware._validate_catalog_file', return_value="catalog_file_name")
- mocker.patch(MODULE_PATH +
- 'idrac_firmware.update_firmware_omsdk', side_effect=exc_type('test'))
- result = self._run_module_with_fail_json(idrac_default_args)
- assert 'msg' in result
- assert result['failed'] is True
-
- def test_main_HTTPError_case(self, idrac_connection_firmware_mock, idrac_default_args,
- idrac_connection_firmware_redfish_mock, mocker):
- idrac_default_args.update({"share_name": "sharename", "catalog_file_name": "Catalog.xml",
- "share_user": "sharename", "share_password": "sharepswd",
- "share_mnt": "sharmnt",
- "reboot": True, "job_wait": True
- })
- json_str = to_text(json.dumps({"data": "out"}))
- idrac_connection_firmware_redfish_mock.success = True
- idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
- mocker.patch(MODULE_PATH + 'idrac_firmware.update_firmware_omsdk',
- side_effect=HTTPError('http://testhost.com', 400, 'http error message',
- {"accept-type": "application/json"},
- StringIO(json_str)))
- result = self._run_module_with_fail_json(idrac_default_args)
- assert 'msg' in result
- assert result['failed'] is True
-
- def test_update_firmware_omsdk_success_case01(self, idrac_connection_firmware_mock,
- idrac_connection_firmware_redfish_mock, idrac_default_args, mocker,
- re_match_mock):
- idrac_default_args.update({"share_name": "https://downloads.dell.com", "catalog_file_name": "Catalog.xml",
- "share_user": "UserName", "share_password": "sharepswd",
- "share_mnt": "shrmnt",
- "reboot": True, "job_wait": True, "ignore_cert_warning": True,
- "apply_update": True})
- mocker.patch(MODULE_PATH + "idrac_firmware.update_firmware_url_omsdk",
- return_value=({"update_status": {"job_details": {"Data": {"StatusCode": 200,
- "body": {"PackageList": [{}]}}}}},
- {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}))
-
- mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson",
- return_value=({"BaseLocation": None,
- "ComponentID": "18981",
- "ComponentType": "APAC",
- "Criticality": "3",
- "DisplayName": "Dell OS Driver Pack",
- "JobID": None,
- "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64"
- "_19.10.12_A00.EXE",
- "PackagePath": "FOLDER05902898M/1/Drivers-for-"
- "OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
- "PackageVersion": "19.10.12",
- "RebootType": "NONE",
- "Target": "DCIM:INSTALLED#802__DriverPack.Embedded.1:LC.Embedded.1"
- }, True, False))
- f_module = self.get_module_mock(params=idrac_default_args)
- idrac_connection_firmware_mock.match.return_value = "2.70"
- idrac_connection_firmware_redfish_mock.success = True
- idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
- idrac_connection_firmware_mock.ServerGeneration.return_value = "13"
- idrac_connection_firmware_mock.update_mgr.update_from_repo.return_value = {
- "job_details": {"Data": {"StatusCode": 200, "GetRepoBasedUpdateList_OUTPUT": {},
- "body": {"PackageList1": [{}]}}}
- }
- result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
- assert result["update_status"]["job_details"]["Data"]["StatusCode"] == 200
-
- def test_update_firmware_omsdk_success_case02(self, idrac_connection_firmware_mock,
- idrac_connection_firmware_redfish_mock, idrac_default_args, mocker,
- re_match_mock, fileonshare_idrac_firmware_mock):
- idrac_default_args.update({"share_name": "mhttps://downloads.dell.com", "catalog_file_name": "Catalog.xml",
- "share_user": "UserName", "share_password": "sharepswd",
- "share_mnt": "shrmnt",
- "reboot": True, "job_wait": True, "ignore_cert_warning": True,
- "apply_update": True
- })
- mocker.patch(MODULE_PATH + "idrac_firmware.update_firmware_url_omsdk",
- return_value=({"update_status": {"job_details": {"data": {"StatusCode": 200,
- "body": {"PackageList": [{}]}}}}},
- {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}))
-
- mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson",
- return_value=({"BaseLocation": None,
- "ComponentID": "18981",
- "ComponentType": "APAC",
- "Criticality": "3",
- "DisplayName": "Dell OS Driver Pack",
- "JobID": None,
- "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64"
- "_19.10.12_A00.EXE",
- "PackagePath": "FOLDER05902898M/1/Drivers-for-"
- "OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
- "PackageVersion": "19.10.12",
- "RebootType": "NONE",
- "Target": "DCIM:INSTALLED#802__DriverPack.Embedded.1:LC.Embedded.1"
- }, True))
-
- f_module = self.get_module_mock(params=idrac_default_args)
- idrac_connection_firmware_mock.match.return_value = "2.70"
- idrac_connection_firmware_mock.ServerGeneration.return_value = "13"
- idrac_connection_firmware_redfish_mock.success = True
- idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
- mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson", return_value=("INSTANCENAME", False, False))
- idrac_connection_firmware_mock.update_mgr.update_from_repo.return_value = {
- "job_details": {"Data": {"StatusCode": 200, "GetRepoBasedUpdateList_OUTPUT": {},
- "body": {"PackageList": [{}]}}}}
- upd_share = fileonshare_idrac_firmware_mock
- upd_share.IsValid = True
- result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
- assert result["update_status"]["job_details"]["Data"]["StatusCode"] == 200
-
- def test_update_firmware_redfish_success_case03(self, idrac_connection_firmware_mock,
- idrac_connection_firmware_redfish_mock,
- idrac_default_args, mocker, re_match_mock):
- idrac_default_args.update({"share_name": "https://downloads.dell.com", "catalog_file_name": "Catalog.xml",
- "share_user": "UserName", "share_password": "sharepswd",
- "share_mnt": "shrmnt",
- "reboot": True, "job_wait": False, "ignore_cert_warning": True,
- "apply_update": True
- })
- mocker.patch(MODULE_PATH + "idrac_firmware.update_firmware_url_redfish",
- return_value=(
- {"job_details": {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}},
- {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}))
-
- mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson",
- return_value=({"BaseLocation": None,
- "ComponentID": "18981",
- "ComponentType": "APAC",
- "Criticality": "3",
- "DisplayName": "Dell OS Driver Pack",
- "JobID": None,
- "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64_"
- "19.10.12_A00.EXE",
- "PackagePath": "FOLDER05902898M/1/Drivers-for-OS-"
- "Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
- "PackageVersion": "19.10.12",
- "RebootType": "NONE",
- "Target": "DCIM:INSTALLED#802__DriverPack.Embedded.1:LC.Embedded.1"
- }, True))
- f_module = self.get_module_mock(params=idrac_default_args)
- idrac_connection_firmware_mock.re_match_mock.group = Mock(return_value="3.30")
- idrac_connection_firmware_redfish_mock.success = True
- idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "3.30"}
- mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson", return_value=("INSTANCENAME", False, False))
- idrac_connection_firmware_mock.ServerGeneration = "14"
- result = self.module.update_firmware_redfish(idrac_connection_firmware_mock, f_module, {})
- assert result["changed"] is False
- assert result["update_msg"] == "Successfully triggered the job to update the firmware."
-
- def test_update_firmware_omsdk_status_success_case01(self, idrac_connection_firmware_mock,
- idrac_connection_firmware_redfish_mock, idrac_default_args,
- mocker, re_match_mock, fileonshare_idrac_firmware_mock):
- idrac_default_args.update({"share_name": "mhttps://downloads.dell.com", "catalog_file_name": "Catalog.xml",
- "share_user": "UserName", "share_password": "sharepswd",
- "share_mnt": "sharemnt",
- "reboot": True, "job_wait": True, "ignore_cert_warning": True,
- "apply_update": True
- })
- mocker.patch(MODULE_PATH + "idrac_firmware.update_firmware_url_omsdk",
- return_value=({"update_status": {"job_details": {"data": {"StatusCode": 200,
- "body": {"PackageList": [{}]}}}}},
- {"job_details": {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}}))
-
- mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson",
- return_value={
- "BaseLocation": None,
- "ComponentID": "18981",
- "ComponentType": "APAC",
- "Criticality": "3",
- "DisplayName": "Dell OS Driver Pack",
- "JobID": None,
- "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
- "PackagePath": "FOLDER05902898M/1/Drivers-for-OS-Deployment_"
- "Application_X0DW6_WN64_19.10.12_A00.EXE",
- "PackageVersion": "19.10.12",
- "RebootType": "NONE",
- "Target": "DCIM:INSTALLED#802__DriverPack.Embedded.1:LC.Embedded.1"
- })
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
- idrac_connection_firmware_mock.match.return_value = "2.70"
- idrac_connection_firmware_mock.ServerGeneration.return_value = "13"
- idrac_connection_firmware_redfish_mock.success = True
- idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
- idrac_connection_firmware_mock.update_mgr.update_from_repo.return_value = {"job_details": {
- "Data": {"StatusCode": 200, "body": {}, "GetRepoBasedUpdateList_OUTPUT": {}}, "Status": "Success"},
- "Status": "Success"}
- upd_share = fileonshare_idrac_firmware_mock
- upd_share.IsValid = True
- result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
- assert result == {'changed': False, 'failed': False,
- 'update_msg': 'Successfully triggered the job to update the firmware.',
- 'update_status': {'Status': 'Success',
- 'job_details': {'Data': {'StatusCode': 200, 'body': {},
- "GetRepoBasedUpdateList_OUTPUT": {}},
- 'Status': 'Success'}}}
-
- def test_update_firmware_omsdk_status_failed_case01(self, idrac_connection_firmware_mock,
- idrac_connection_firmware_redfish_mock,
- idrac_default_args, mocker, re_match_mock):
- idrac_default_args.update({"share_name": "mhttps://downloads.dell.com", "catalog_file_name": "Catalog.xml",
- "share_user": "UserName", "share_password": "sharepswd",
- "share_mnt": "sharemnt",
- "reboot": True, "job_wait": True, "ignore_cert_warning": True,
- "apply_update": True})
- mocker.patch(MODULE_PATH + "idrac_firmware.update_firmware_url_omsdk",
- return_value=({"update_status": {"job_details": {"data": {"StatusCode": 200,
- "body": {"PackageList": [{}]}}}}},
- {"job_details": {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}}))
-
- mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson",
- return_value={
- "BaseLocation": None,
- "ComponentID": "18981",
- "ComponentType": "APAC",
- "Criticality": "3",
- "DisplayName": "Dell OS Driver Pack",
- "JobID": None,
- "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
- "PackagePath": "FOLDER05902898M/1/Drivers-for-OS-Deployment_"
- "Application_X0DW6_WN64_19.10.12_A00.EXE",
- "PackageVersion": "19.10.12",
- "RebootType": "NONE",
- "Target": "DCIM:INSTALLED#802__DriverPack.Embedded.1:LC.Embedded.1"
- })
+ @pytest.fixture
+ def idrac_connection_firm_mock(self, mocker, redfish_response_mock):
- f_module = self.get_module_mock(params=idrac_default_args)
- idrac_connection_firmware_mock.match.return_value = "2.70"
- idrac_connection_firmware_mock.ServerGeneration.return_value = "13"
- idrac_connection_firmware_redfish_mock.success = True
- idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
- idrac_connection_firmware_mock.update_mgr.update_from_repo.return_value = {"job_details": {"Data": {
- "StatusCode": 200, "body": {}, "GetRepoBasedUpdateList_OUTPUT": {}}, "Status": "Failed"},
- "Status": "Failed"}
- with pytest.raises(Exception) as ex:
- self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
- assert ex.value.args[0] == "Firmware update failed."
+ connection_class_mock = mocker.patch(MODULE_PATH + 'idrac_firmware.iDRACRedfishAPI')
+ redfish_connection_obj = connection_class_mock.return_value.__enter__.return_value
+ redfish_connection_obj.invoke_request.return_value = redfish_response_mock
+ return redfish_connection_obj
- def test__validate_catalog_file_case01(self, idrac_connection_firmware_mock, idrac_default_args):
+ def test__validate_catalog_file_case01(self, idrac_default_args):
idrac_default_args.update({"catalog_file_name": ""})
with pytest.raises(ValueError) as exc:
self.module._validate_catalog_file("")
assert exc.value.args[0] == 'catalog_file_name should be a non-empty string.'
- def test__validate_catalog_file_case02(self, idrac_connection_firmware_mock, idrac_default_args):
+ def test__validate_catalog_file_case02(self, idrac_default_args):
idrac_default_args.update({"catalog_file_name": "Catalog.json"})
with pytest.raises(ValueError) as exc:
self.module._validate_catalog_file("Catalog.json")
assert exc.value.args[0] == 'catalog_file_name should be an XML file.'
- def test_convert_xmltojson_case01(self, mocker, idrac_connection_firmware_mock,
- idrac_default_args, ET_convert_mock):
- idrac_default_args.update({"PackageList": [{
- "BaseLocation": None,
- "ComponentID": "18981",
- "ComponentType": "APAC",
- "Criticality": "3",
- "DisplayName": "Dell OS Driver Pack",
- "JobID": None,
- "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
- "PackagePath":
- "FOLDER05902898M/1/Drivers-for-OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
- "PackageVersion": "19.10.12"}]})
- mocker.patch(MODULE_PATH + "idrac_firmware.get_job_status", return_value=("Component", False))
- mocker.patch(MODULE_PATH + 'idrac_firmware.ET')
- result = self.module._convert_xmltojson({"PackageList": [{"INSTANCENAME": {"PROPERTY": {"NAME": "abc"}}}]},
- MagicMock(), None)
- assert result == ([], True, False)
-
- def test_convert_xmltojson_case02(self, mocker, idrac_connection_firmware_mock, idrac_default_args):
- idrac_default_args.update({"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}})
- packagelist = {"PackageList": "INSTANCENAME"}
+ def test_convert_xmltojson(self, mocker, idrac_default_args, idrac_connection_firmware_redfish_mock):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": CATALOG,
+ "share_user": "sharename", "share_password": SHARE_PWD,
+ "share_mnt": "sharmnt", "reboot": True, "job_wait": True, "apply_update": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
mocker.patch(MODULE_PATH + "idrac_firmware.get_job_status", return_value=("Component", False))
- mocker.patch(MODULE_PATH + 'idrac_firmware.ET')
- result = self.module._convert_xmltojson(packagelist, MagicMock(), None)
+ job_details = {"PackageList": """<?xml version="1.0" encoding="UTF-8" ?><root><BaseLocation /><ComponentID>18981</ComponentID></root>"""}
+ result = self.module._convert_xmltojson(f_module, job_details, idrac_connection_firmware_redfish_mock)
assert result == ([], True, False)
-
- def test_get_jobid_success_case01(self, idrac_connection_firmware_mock, idrac_default_args,
- idrac_firmware_job_mock,
- idrac_connection_firmware_redfish_mock):
- idrac_default_args.update({"Location": "https://jobmnager/jid123"})
- idrac_firmware_job_mock.status_code = 202
- idrac_firmware_job_mock.Success = True
- idrac_connection_firmware_redfish_mock.update_mgr.headers.get().split().__getitem__().return_value = "jid123"
- f_module = self.get_module_mock(params=idrac_default_args)
- result = self.module.get_jobid(f_module, idrac_firmware_job_mock)
- assert result == idrac_connection_firmware_redfish_mock.headers.get().split().__getitem__()
-
- def test_get_jobid_fail_case01(self, idrac_connection_firmware_mock, idrac_default_args,
- idrac_firmware_job_mock):
- idrac_firmware_job_mock.status_code = 202
- idrac_firmware_job_mock.headers = {"Location": None}
- f_module = self.get_module_mock(params=idrac_default_args)
- with pytest.raises(Exception) as exc:
- self.module.get_jobid(f_module, idrac_firmware_job_mock)
- assert exc.value.args[0] == "Failed to update firmware."
-
- def test_get_jobid_fail_case02(self, idrac_connection_firmware_mock, idrac_default_args,
- idrac_firmware_job_mock):
- idrac_firmware_job_mock.status_code = 400
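+ # Second pass: ET.fromstring is mocked to yield two package entries and
+ # get_job_status now reports a failure, so both components come back with
+ # the error flag set.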
+ et_mock = MagicMock()
+ et_mock.iter.return_value = [et_mock, et_mock]
+ mocker.patch(MODULE_PATH + "idrac_firmware.ET.fromstring", return_value=et_mock)
+ mocker.patch(MODULE_PATH + "idrac_firmware.get_job_status", return_value=("Component", True))
+ result = self.module._convert_xmltojson(f_module, job_details, idrac_connection_firmware_redfish_mock)
+ assert result[0] == ['Component', 'Component']
+ assert result[1]
+ assert result[2]
+
+ def test_update_firmware_url_omsdk(self, idrac_connection_firmware_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": DELL_SHARE, "catalog_file_name": CATALOG,
+ "share_user": "shareuser", "share_password": SHARE_PWD,
+ "share_mnt": "sharmnt", "reboot": True, "job_wait": False, "ignore_cert_warning": True,
+ "share_type": "http", "idrac_ip": "idrac_ip", "idrac_user": "idrac_user",
+ "idrac_password": "idrac_password", "idrac_port": 443, "proxy_support": "Off"})
+ mocker.patch(MODULE_PATH + GET_JOBID, return_value="23451")
+ mocker.patch(MODULE_PATH + "idrac_firmware.get_check_mode_status")
+ idrac_connection_firmware_mock.use_redfish = True
+ idrac_connection_firmware_mock.job_mgr.get_job_status_redfish.return_value = "23451"
+ idrac_connection_firmware_mock.update_mgr.update_from_dell_repo_url.return_value = {"InstanceID": "JID_12345678"}
f_module = self.get_module_mock(params=idrac_default_args)
- with pytest.raises(Exception) as exc:
- self.module.get_jobid(f_module, idrac_firmware_job_mock)
- assert exc.value.args[0] == "Failed to update firmware."
+ payload = {"ApplyUpdate": "True", "CatalogFile": CATALOG, "IgnoreCertWarning": "On",
+ "RebootNeeded": True, "UserName": "username", "Password": USER_PWD}
+ result = self.module.update_firmware_url_omsdk(f_module, idrac_connection_firmware_mock,
+ DELL_SHARE, CATALOG, True, True, True, True, payload)
+ assert result[0] == {"InstanceID": "JID_12345678"}
def test_update_firmware_url_omsdk_success_case02(self, idrac_connection_firmware_mock, idrac_default_args,
mocker, idrac_connection_firmware_redfish_mock):
- idrac_default_args.update({"share_name": "http://downloads.dell.com", "catalog_file_name": "catalog.xml",
- "share_user": "shareuser", "share_password": "sharepswd",
+ idrac_default_args.update({"share_name": DELL_SHARE, "catalog_file_name": CATALOG,
+ "share_user": "shareuser", "share_password": SHARE_PWD,
"share_mnt": "sharmnt",
"reboot": True, "job_wait": False, "ignore_cert_warning": True,
"share_type": "http", "idrac_ip": "idrac_ip", "idrac_user": "idrac_user",
- "idrac_password": "idrac_password", "idrac_port": 443
+ "idrac_password": "idrac_password", "idrac_port": 443, "proxy_support": "Off",
})
- mocker.patch(MODULE_PATH + "idrac_firmware.get_jobid",
- return_value="23451")
-
+ mocker.patch(MODULE_PATH + GET_JOBID, return_value="23451")
mocker.patch(MODULE_PATH + "idrac_firmware.urlparse",
return_value=ParseResult(scheme='http', netloc='downloads.dell.com',
path='/%7Eguido/Python.html',
@@ -478,148 +211,353 @@ class TestidracFirmware(FakeAnsibleModule):
idrac_connection_firmware_redfish_mock.get_job_status_redfish = "Status"
idrac_connection_firmware_redfish_mock.update_mgr.job_mgr.job_wait.return_value = "12345"
idrac_connection_firmware_mock.update_mgr.update_from_repo_url.return_value = {
- "update_status": {"job_details": {"data": {
- "StatusCode": 200,
- "body": {
- "PackageList": [
- {}]
- }
- }
- }
- }
+ "update_status": {"job_details": {"data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}}
}
idrac_connection_firmware_mock.update_mgr.update_from_dell_repo_url.return_value = {"job_details": {"Data": {
- "GetRepoBasedUpdateList_OUTPUT": {
- "Message": [
- {}]
- }
- }
+ "GetRepoBasedUpdateList_OUTPUT": {"Message": [{}]}}}
}
- }
- payload = {"ApplyUpdate": "True",
- "CatalogFile": "Catalog.xml",
- "IgnoreCertWarning": "On",
- "RebootNeeded": True,
- "UserName": "username",
- "Password": "psw"
- }
+ payload = {"ApplyUpdate": "True", "CatalogFile": CATALOG, "IgnoreCertWarning": "On", "RebootNeeded": True,
+ "UserName": "username", "Password": USER_PWD}
result = self.module.update_firmware_url_omsdk(f_module, idrac_connection_firmware_mock,
- "http://downloads.dell.com", "catalog.xml", True, True, True,
+ DELL_SHARE, CATALOG, True, True, True,
False, payload)
- assert result == (
- {'job_details': {'Data': {'GetRepoBasedUpdateList_OUTPUT': {'Message': [{}]}}}}, {})
-
- def test_update_firmware_url_omsdk(self, idrac_connection_firmware_mock, idrac_default_args, mocker,
- idrac_connection_firmware_redfish_mock):
- idrac_default_args.update({"share_name": "http://downloads.dell.com", "catalog_file_name": "catalog.xml",
- "share_user": "shareuser", "share_password": "sharepswd",
- "share_mnt": "sharmnt",
- "reboot": True, "job_wait": False, "ignore_cert_warning": True,
- "share_type": "http", "idrac_ip": "idrac_ip", "idrac_user": "idrac_user",
- "idrac_password": "idrac_password", "idrac_port": 443
- })
- mocker.patch(MODULE_PATH + "idrac_firmware.get_jobid",
- return_value="23451")
- mocker.patch(MODULE_PATH + "idrac_firmware.get_check_mode_status")
- idrac_connection_firmware_mock.use_redfish = True
- idrac_connection_firmware_mock.job_mgr.get_job_status_redfish.return_value = "23451"
- idrac_connection_firmware_mock.update_mgr.update_from_dell_repo_url.return_value = {
- "InstanceID": "JID_12345678"}
- f_module = self.get_module_mock(params=idrac_default_args)
- payload = {"ApplyUpdate": "True", "CatalogFile": "Catalog.xml", "IgnoreCertWarning": "On",
- "RebootNeeded": True, "UserName": "username", "Password": "psw"}
- result = self.module.update_firmware_url_omsdk(f_module, idrac_connection_firmware_mock,
- "http://downloads.dell.com/repo",
- "catalog.xml", True, True, True, True, payload)
- assert result[0] == {"InstanceID": "JID_12345678"}
-
- def _test_update_firmware_redfish(self, idrac_connection_firmware_mock, idrac_default_args, re_match_mock,
- mocker, idrac_connection_firmware_redfish_mock,
- fileonshare_idrac_firmware_mock):
- idrac_default_args.update({"share_name": "192.168.0.1:/share_name", "catalog_file_name": "catalog.xml",
- "share_user": "shareuser", "share_password": "sharepswd",
- "share_mnt": "sharmnt",
- "reboot": True, "job_wait": False, "ignore_cert_warning": True,
- "share_type": "http", "idrac_ip": "idrac_ip", "idrac_user": "idrac_user",
- "idrac_password": "idrac_password", "idrac_port": 443, 'apply_update': True
- })
- mocker.patch(MODULE_PATH + "idrac_firmware.SHARE_TYPE",
- return_value={"NFS": "NFS"})
- mocker.patch(MODULE_PATH + "idrac_firmware.eval",
- return_value={"PackageList": []})
- mocker.patch(MODULE_PATH + "idrac_firmware.wait_for_job_completion", return_value=({}, None))
- f_module = self.get_module_mock(params=idrac_default_args)
- re_mock = mocker.patch(MODULE_PATH + "idrac_firmware.re",
- return_value=MagicMock())
- re_mock.match(MagicMock(), MagicMock()).group.return_value = "3.60"
- mocker.patch(MODULE_PATH + "idrac_firmware.get_jobid",
- return_value="23451")
- idrac_connection_firmware_mock.idrac.update_mgr.job_mgr.get_job_status_redfish.return_value = "23451"
- idrac_connection_firmware_mock.ServerGeneration = "14"
- upd_share = fileonshare_idrac_firmware_mock
- upd_share.remote_addr.return_value = "192.168.0.1"
- upd_share.remote.share_name.return_value = "share_name"
- upd_share.remote_share_type.name.lower.return_value = "NFS"
- result = self.module.update_firmware_redfish(idrac_connection_firmware_mock, f_module)
- assert result['update_msg'] == "Successfully triggered the job to update the firmware."
-
- def _test_get_job_status(self, idrac_connection_firmware_mock, idrac_default_args,
- mocker, idrac_connection_firmware_redfish_mock):
- idrac_default_args.update({"share_name": "http://downloads.dell.com", "catalog_file_name": "catalog.xml",
- "share_user": "shareuser", "share_password": "sharepswd",
- "share_mnt": "sharmnt", "apply_update": False,
- "reboot": True, "job_wait": False, "ignore_cert_warning": True,
- "share_type": "http", "idrac_ip": "idrac_ip", "idrac_user": "idrac_user",
- "idrac_password": "idrac_password", "idrac_port": 443})
- f_module = self.get_module_mock(params=idrac_default_args)
- idrac_connection_firmware_redfish_mock.success = True
- idrac_connection_firmware_redfish_mock.json_data = {"JobStatus": "OK"}
- each_comp = {"JobID": "JID_1234567", "Messages": [{"Message": "test_message"}], "JobStatus": "Completed"}
- result = self.module.get_job_status(f_module, each_comp, None)
- assert result[1] is False
+ assert result == ({'job_details': {'Data': {'GetRepoBasedUpdateList_OUTPUT': {'Message': [{}]}}}}, {})
def test_message_verification(self, idrac_connection_firmware_mock, idrac_connection_firmware_redfish_mock,
idrac_default_args, mocker):
- idrac_default_args.update({"share_name": "http://downloads.dell.com", "catalog_file_name": "catalog.xml",
- "share_user": "shareuser", "share_password": "sharepswd",
+ idrac_default_args.update({"share_name": DELL_SHARE, "catalog_file_name": CATALOG,
+ "share_user": "shareuser", "share_password": SHARE_PWD,
"share_mnt": "sharmnt", "apply_update": False,
"reboot": False, "job_wait": True, "ignore_cert_warning": True,
"idrac_ip": "idrac_ip", "idrac_user": "idrac_user",
- "idrac_password": "idrac_password", "idrac_port": 443})
- mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson", return_value=("INSTANCENAME", False, False))
- # mocker.patch(MODULE_PATH + "idrac_firmware.re")
+ "idrac_password": "idrac_password", "idrac_port": 443, "proxy_support": "Off", })
+ mocker.patch(MODULE_PATH + CONVERT_XML_JSON, return_value=("INSTANCENAME", False, False))
idrac_connection_firmware_redfish_mock.success = True
idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
f_module = self.get_module_mock(params=idrac_default_args)
result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
assert result['update_msg'] == "Successfully fetched the applicable firmware update package list."
-
idrac_default_args.update({"apply_update": True, "reboot": False, "job_wait": False})
f_module = self.get_module_mock(params=idrac_default_args)
result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
assert result['update_msg'] == "Successfully triggered the job to stage the firmware."
-
idrac_default_args.update({"apply_update": True, "reboot": False, "job_wait": True})
f_module = self.get_module_mock(params=idrac_default_args)
result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
assert result['update_msg'] == "Successfully staged the applicable firmware update packages."
-
idrac_default_args.update({"apply_update": True, "reboot": False, "job_wait": True})
mocker.patch(MODULE_PATH + "idrac_firmware.update_firmware_url_omsdk",
return_value=({"Status": "Success"}, {"PackageList": []}))
- mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson", return_value=({}, True, True))
+ mocker.patch(MODULE_PATH + CONVERT_XML_JSON, return_value=({}, True, True))
f_module = self.get_module_mock(params=idrac_default_args)
result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
assert result['update_msg'] == "Successfully staged the applicable firmware update packages with error(s)."
-
idrac_default_args.update({"apply_update": True, "reboot": True, "job_wait": True})
- mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson", return_value=({}, True, False))
+ mocker.patch(MODULE_PATH + CONVERT_XML_JSON, return_value=({}, True, False))
f_module = self.get_module_mock(params=idrac_default_args)
result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
- assert result['update_msg'] == "Successfully updated the firmware."
-
+ assert result['update_msg'] == SUCCESS_MSG
idrac_default_args.update({"apply_update": True, "reboot": True, "job_wait": True})
- mocker.patch(MODULE_PATH + "idrac_firmware._convert_xmltojson", return_value=({}, True, True))
+ mocker.patch(MODULE_PATH + CONVERT_XML_JSON, return_value=({}, True, True))
f_module = self.get_module_mock(params=idrac_default_args)
result = self.module.update_firmware_omsdk(idrac_connection_firmware_mock, f_module)
assert result['update_msg'] == "Firmware update failed."
+
+ def test_update_firmware_redfish_success_case03(self, idrac_connection_firmware_mock,
+ idrac_connection_firmware_redfish_mock,
+ idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": DELL_SHARE, "catalog_file_name": CATALOG,
+ "share_user": "UserName", "share_password": SHARE_PWD, "share_mnt": "shrmnt",
+ "reboot": True, "job_wait": False, "ignore_cert_warning": True, "apply_update": True})
+ mocker.patch(MODULE_PATH + UPDATE_URL,
+ return_value=({"job_details": {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}},
+ {"Data": {"StatusCode": 200, "body": {"PackageList": [{}]}}}))
+ mocker.patch(MODULE_PATH + CONVERT_XML_JSON,
+ return_value=({"BaseLocation": None, "ComponentID": "18981", "ComponentType": "APAC", "Criticality": "3",
+ "DisplayName": "Dell OS Driver Pack", "JobID": None,
+ "PackageName": "Drivers-for-OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
+ "PackagePath": "FOLDER05902898M/1/Drivers-for-OS-Deployment_Application_X0DW6_WN64_19.10.12_A00.EXE",
+ "PackageVersion": "19.10.12", "RebootType": "NONE",
+ "Target": "DCIM:INSTALLED#802__DriverPack.Embedded.1:LC.Embedded.1"}, True))
+ f_module = self.get_module_mock(params=idrac_default_args)
+ idrac_connection_firmware_mock.re_match_mock.group = Mock(return_value="3.30")
+ idrac_connection_firmware_redfish_mock.success = True
+ idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "3.30"}
+ mocker.patch(MODULE_PATH + CONVERT_XML_JSON, return_value=("INSTANCENAME", False, False))
+ idrac_connection_firmware_mock.ServerGeneration = "14"
+ result = self.module.update_firmware_redfish(idrac_connection_firmware_mock, f_module, {})
+ assert result["changed"] is False
+ assert result["update_msg"] == "Successfully triggered the job to update the firmware."
+ idrac_default_args.update({"proxy_support": "ParametersProxy", "proxy_server": "127.0.0.2", "proxy_port": 3128,
+ "proxy_type": "HTTP", "proxy_uname": "username", "proxy_passwd": "pwd", "apply_update": False})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = True
+ mocker.patch(MODULE_PATH + WAIT_FOR_JOB, return_value=({"JobStatus": "Ok"}, ""))
+ mocker.patch(MODULE_PATH + CONVERT_XML_JSON, return_value=({"PackageList": []}, False, False))
+ mocker.patch(MODULE_PATH + UPDATE_URL,
+ return_value=({"JobStatus": "Ok"}, {"Status": "Success", "JobStatus": "Ok",
+ "Data": {"GetRepoBasedUpdateList_OUTPUT": {}}}))
+ with pytest.raises(Exception) as exc:
+ self.module.update_firmware_redfish(idrac_connection_firmware_mock, f_module, {})
+ assert exc.value.args[0] == 'Unable to complete the firmware repository download.'
+ idrac_default_args.update({"share_name": "\\\\127.0.0.1\\cifsshare"})
+ idrac_connection_firmware_mock.json_data = {"Status": "Success"}
+ mocker.patch(MODULE_PATH + GET_JOBID, return_value=None)
+ mocker.patch(MODULE_PATH + WAIT_FOR_JOB,
+ return_value=({"JobStatus": "Ok"}, {"job_details": "", "JobStatus": "Ok"}))
+ with pytest.raises(Exception) as exc:
+ self.module.update_firmware_redfish(idrac_connection_firmware_mock, f_module, {})
+ assert exc.value.args[0] == 'Unable to complete the firmware repository download.'
+ idrac_default_args.update({"apply_update": True, "reboot": False, "job_wait": True})
+ mocker.patch(MODULE_PATH + WAIT_FOR_JOB,
+ return_value=({"JobStatus": "OK"}, {"job_details": "", "JobStatus": "OK"}))
+ with pytest.raises(Exception) as exc:
+ self.module.update_firmware_redfish(idrac_connection_firmware_mock, f_module, {})
+ assert exc.value.args[0] == 'Changes found to commit!'
+ f_module.check_mode = False
+ idrac_default_args.update({"apply_update": True, "reboot": True, "job_wait": True, "share_name": "https://127.0.0.2/httpshare"})
+ mocker.patch(MODULE_PATH + CONVERT_XML_JSON, return_value=({"PackageList": []}, True, False))
+ mocker.patch(MODULE_PATH + UPDATE_URL, return_value=(
+ {"JobStatus": "Ok"}, {"Status": "Success", "JobStatus": "Ok", "PackageList": [],
+ "Data": {"GetRepoBasedUpdateList_OUTPUT": {}}}))
+ result = self.module.update_firmware_redfish(idrac_connection_firmware_mock, f_module, {})
+ assert result["update_msg"] == SUCCESS_MSG
+ mocker.patch(MODULE_PATH + CONVERT_XML_JSON, return_value=({"PackageList": []}, True, True))
+ result = self.module.update_firmware_redfish(idrac_connection_firmware_mock, f_module, {})
+ assert result["update_msg"] == "Firmware update failed."
+ idrac_default_args.update({"apply_update": False})
+ mocker.patch(MODULE_PATH + UPDATE_URL,
+ return_value=({"JobStatus": "Critical"}, {"Status": "Success", "JobStatus": "Critical", "PackageList": [],
+ "Data": {"GetRepoBasedUpdateList_OUTPUT": {}}}))
+ with pytest.raises(Exception) as exc:
+ self.module.update_firmware_redfish(idrac_connection_firmware_mock, f_module, {})
+ assert exc.value.args[0] == 'Unable to complete the repository update.'
+ idrac_default_args.update({"apply_update": True, "reboot": False, "job_wait": True, "share_name": "https://127.0.0.3/httpshare"})
+ with pytest.raises(Exception) as exc:
+ self.module.update_firmware_redfish(idrac_connection_firmware_mock, f_module, {})
+ assert exc.value.args[0] == 'Firmware update failed.'
+ idrac_default_args.update({"apply_update": True, "reboot": False, "job_wait": False, "share_name": "https://127.0.0.4/httpshare"})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = True
+ mocker.patch(MODULE_PATH + CONVERT_XML_JSON, return_value=({"PackageList": []}, True, False))
+ mocker.patch(MODULE_PATH + UPDATE_URL,
+ return_value=({"JobStatus": "OK"}, {"Status": "Success", "JobStatus": "OK", "PackageList": ['test'],
+ "Data": {"key": "value"}}))
+ with pytest.raises(Exception) as exc:
+ self.module.update_firmware_redfish(idrac_connection_firmware_mock, f_module, {})
+ assert exc.value.args[0] == 'Changes found to commit!'
+
+ def test_main_idrac_firmware_success_case(self, idrac_connection_firmware_redfish_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": CATALOG,
+ "share_user": "sharename", "share_password": SHARE_PWD,
+ "share_mnt": "sharmnt", "reboot": True, "job_wait": True})
+ message = {"Status": "Success", "update_msg": SUCCESS_MSG,
+ "update_status": "Success", 'changed': False, 'failed': False}
+ idrac_connection_firmware_redfish_mock.success = True
+ idrac_connection_firmware_redfish_mock.json_data = {}
+ mocker.patch(MODULE_PATH + 'idrac_firmware.update_firmware_redfish', return_value=message)
+ result = self._run_module(idrac_default_args)
+ assert result == {'msg': 'Successfully updated the firmware.', 'update_status': 'Success',
+ 'changed': False, 'failed': False}
+
+ def test_main_HTTPError_case(self, idrac_default_args, idrac_connection_firmware_redfish_mock, mocker):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": CATALOG,
+ "share_user": "sharename", "share_password": SHARE_PWD,
+ "share_mnt": "sharmnt",
+ "reboot": True, "job_wait": True})
+ json_str = to_text(json.dumps({"data": "out"}))
+ idrac_connection_firmware_redfish_mock.success = True
+ idrac_connection_firmware_redfish_mock.json_data = {"FirmwareVersion": "2.70"}
+ mocker.patch(MODULE_PATH + 'idrac_firmware.update_firmware_omsdk',
+ side_effect=HTTPError('https://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert 'msg' in result
+ assert result['failed'] is True
+
+ def test_get_jobid(self, idrac_connection_firmware_mock, idrac_default_args):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": CATALOG,
+ "share_user": "sharename", "share_password": SHARE_PWD,
+ "share_mnt": "sharmnt", "reboot": True, "job_wait": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ idrac_connection_firmware_mock.status_code = 202
+ idrac_connection_firmware_mock.headers = {"Location": "/uri/JID_123456789"}
+ result = self.module.get_jobid(f_module, idrac_connection_firmware_mock)
+ assert result == "JID_123456789"
+ idrac_connection_firmware_mock.headers = {"Location": None}
+ with pytest.raises(Exception) as exc:
+ self.module.get_jobid(f_module, idrac_connection_firmware_mock)
+ assert exc.value.args[0] == 'Failed to update firmware.'
+ idrac_connection_firmware_mock.status_code = 200
+ with pytest.raises(Exception) as exc:
+ self.module.get_jobid(f_module, idrac_connection_firmware_mock)
+ assert exc.value.args[0] == 'Failed to update firmware.'
+
+ def test_handle_HTTP_error(self, idrac_default_args, mocker):
+ error_message = {"error": {"@Message.ExtendedInfo": [{"Message": "Http error message", "MessageId": "SUP029"}]}}
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": CATALOG,
+ "share_user": "sharename", "share_password": SHARE_PWD,
+ "share_mnt": "sharmnt", "reboot": True, "job_wait": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + 'idrac_firmware.json.load', return_value=error_message)
+ with pytest.raises(Exception) as exc:
+ self.module.handle_HTTP_error(f_module, error_message)
+ assert exc.value.args[0] == 'Http error message'
+
+ def test_get_job_status(self, idrac_default_args, idrac_connection_firmware_redfish_mock, mocker):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": CATALOG,
+ "share_user": "sharename", "share_password": SHARE_PWD,
+ "share_mnt": "sharmnt", "reboot": True, "job_wait": True, "apply_update": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ each_comp = {"JobID": "JID_123456789", "Message": "Invalid", "JobStatus": "Ok"}
+ idrac_connection_firmware_redfish_mock.job_mgr.job_wait.return_value = {"JobStatus": "Completed", "Message": "Invalid"}
+ comp, failed = self.module.get_job_status(f_module, each_comp, idrac_connection_firmware_redfish_mock)
+ assert comp == {'JobID': 'JID_123456789', 'Message': 'Invalid', 'JobStatus': 'Critical'}
+ assert failed
+ mocker.patch(MODULE_PATH + WAIT_FOR_JOB,
+ return_value=(idrac_connection_firmware_redfish_mock, ""))
+ each_comp = {"JobID": "JID_123456789", "Message": "Invalid", "JobStatus": "Critical"}
+ idrac_connection_firmware_redfish_mock.json_data = {"Messages": [{"Message": "Success"}], "JobStatus": "Critical"}
+ comp, failed = self.module.get_job_status(f_module, each_comp, None)
+ assert comp == {'JobID': 'JID_123456789', 'Message': 'Success', 'JobStatus': 'Critical'}
+ assert failed
+
+ def test_wait_for_job_completion(self, idrac_default_args, idrac_connection_firm_mock, redfish_response_mock):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": CATALOG,
+ "share_user": "sharename", "share_password": SHARE_PWD,
+ "share_mnt": "sharmnt", "reboot": True, "job_wait": True, "apply_update": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ result, msg = self.module.wait_for_job_completion(f_module, "JobService/Jobs/JID_1234567890")
+ assert msg is None
+ redfish_response_mock.json_data = {"Members": {}, "JobState": "Completed", "PercentComplete": 100}
+ result, msg = self.module.wait_for_job_completion(f_module, "JobService/Jobs/JID_12345678", job_wait=True)
+ assert result.json_data["JobState"] == "Completed"
+ redfish_response_mock.json_data = {"Members": {}, "JobState": "New", "PercentComplete": 0}
+ result, msg = self.module.wait_for_job_completion(f_module, "JobService/Jobs/JID_123456789", job_wait=True, apply_update=True)
+ assert result.json_data["JobState"] == "New"
+
+ @pytest.mark.parametrize("exc_type", [TypeError])
+ def test_wait_for_job_completion_exception(self, exc_type, idrac_default_args, idrac_connection_firmware_redfish_mock, mocker):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": CATALOG,
+ "share_user": "sharename", "share_password": SHARE_PWD,
+ "share_mnt": "sharmnt", "reboot": True, "job_wait": True, "apply_update": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + TIME_SLEEP, return_value=None)
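+ # time.sleep is patched out so the polling loop finishes quickly; the
+ # persistent TypeError means no job response ever arrives and the wait
+ # ends with the timeout message.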
+ if exc_type == TypeError:
+ idrac_connection_firmware_redfish_mock.invoke_request.side_effect = exc_type("exception message")
+ result, msg = self.module.wait_for_job_completion(f_module, "JobService/Jobs/JID_123456789", job_wait=True)
+ assert msg == "Job wait timed out after 120.0 minutes"
+
+ def test_get_check_mode_status_check_mode(self, idrac_default_args):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": CATALOG,
+ "share_user": "sharename", "share_password": SHARE_PWD,
+ "share_mnt": "sharmnt", "reboot": True, "job_wait": True, "apply_update": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = True
+ status = {"job_details": {"Data": {"GetRepoBasedUpdateList_OUTPUT": {
+ "Message": "Firmware versions on server match catalog, applicable updates are not present in the repository"}}},
+ "JobStatus": "Completed"}
+ with pytest.raises(Exception) as ex:
+ self.module.get_check_mode_status(status, f_module)
+ assert ex.value.args[0] == "No changes found to commit!"
+ f_module.check_mode = False
+ with pytest.raises(Exception) as ex:
+ self.module.get_check_mode_status(status, f_module)
+ assert ex.value.args[0] == "The catalog in the repository specified in the operation has the same firmware versions as currently present on the server."
+
+ def test_update_firmware_url_redfish(self, idrac_default_args, idrac_connection_firmware_redfish_mock, mocker):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": CATALOG,
+ "share_user": "sharename", "share_password": SHARE_PWD,
+ "share_mnt": "sharmnt", "reboot": True, "job_wait": True, "apply_update": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + TIME_SLEEP, return_value=None)
+ mocker.patch(MODULE_PATH + 'idrac_firmware.get_error_syslog', return_value=(True, "Failed to update firmware."))
+ mocker.patch(MODULE_PATH + WAIT_FOR_JOB, return_value=None)
+ mocker.patch(MODULE_PATH + 'idrac_firmware.get_jobid', return_value="JID_123456789")
+ mocker.patch(MODULE_PATH + 'idrac_firmware.handle_HTTP_error', return_value=None)
+ actions = {"Actions": {"#DellSoftwareInstallationService.InstallFromRepository": {"target": "/api/installRepository"},
+ "#DellSoftwareInstallationService.GetRepoBasedUpdateList": {"target": "/api/getRepoBasedUpdateList"}}}
+ idrac_connection_firmware_redfish_mock.json_data = {"Entries": {"@odata.id": "/api/log"}, "DateTime": "2023-10-05"}
+ with pytest.raises(Exception) as ex:
+ self.module.update_firmware_url_redfish(f_module, idrac_connection_firmware_redfish_mock,
+ "https://127.0.0.1/httpshare", True, True, True, {}, actions)
+ assert ex.value.args[0] == "Failed to update firmware."
+ mocker.patch(MODULE_PATH + 'idrac_firmware.get_error_syslog', return_value=(False, ""))
+ mocker.patch(MODULE_PATH + WAIT_FOR_JOB, return_value=(None, "Successfully updated."))
+ result, msg = self.module.update_firmware_url_redfish(f_module, idrac_connection_firmware_redfish_mock,
+ "https://127.0.0.1/httpshare", True, True, True, {}, actions)
+ assert result["update_msg"] == "Successfully updated."
+
+ def test_get_error_syslog(self, idrac_default_args, idrac_connection_firm_mock, redfish_response_mock, mocker):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": CATALOG,
+ "share_user": "sharename", "share_password": SHARE_PWD,
+ "share_mnt": "sharmnt", "reboot": True, "job_wait": True, "apply_update": True})
+ self.get_module_mock(params=idrac_default_args)
+ redfish_response_mock.json_data = {"Members": [{"MessageId": "SYS229"}], "Entries": {"@odata.id": "/api/log"}}
+ mocker.patch(MODULE_PATH + TIME_SLEEP, return_value=None)
+ result = self.module.get_error_syslog(idrac_connection_firm_mock, "", "/api/service")
+ assert result[0]
+
+ def test_update_firmware_omsdk(self, idrac_default_args, idrac_connection_firmware_redfish_mock, mocker):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": CATALOG,
+ "share_user": "sharename", "share_password": SHARE_PWD, "ignore_cert_warning": False,
+ "share_mnt": "sharmnt", "reboot": True, "job_wait": True, "apply_update": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ mocker.patch(MODULE_PATH + 'idrac_firmware.FileOnShare', return_value=None)
+ mocker.patch(MODULE_PATH + 'idrac_firmware.get_check_mode_status', return_value=None)
+ mocker.patch(MODULE_PATH + 'idrac_firmware._convert_xmltojson', return_value=([], True, False))
+ status = {"job_details": {"Data": {"GetRepoBasedUpdateList_OUTPUT": {"PackageList": []}}}, "JobStatus": "Completed"}
+ idrac_connection_firmware_redfish_mock.update_mgr.update_from_repo.return_value = status
+ result = self.module.update_firmware_omsdk(idrac_connection_firmware_redfish_mock, f_module)
+ assert result['update_msg'] == 'Successfully triggered the job to update the firmware.'
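+ # The follow-up calls flip check mode and the mocked repository status to
+ # hit the "Changes found to commit!", download-failure, no-change, and
+ # repository-update error branches.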
+ f_module.check_mode = True
+ with pytest.raises(Exception) as ex:
+ self.module.update_firmware_omsdk(idrac_connection_firmware_redfish_mock, f_module)
+ assert ex.value.args[0] == "Changes found to commit!"
+ status.update({"JobStatus": "InProgress"})
+ with pytest.raises(Exception) as ex:
+ self.module.update_firmware_omsdk(idrac_connection_firmware_redfish_mock, f_module)
+ assert ex.value.args[0] == "Unable to complete the firmware repository download."
+ status = {"job_details": {"Data": {}, "PackageList": []}, "JobStatus": "Completed", "Status": "Failed"}
+ idrac_connection_firmware_redfish_mock.update_mgr.update_from_repo.return_value = status
+ with pytest.raises(Exception) as ex:
+ self.module.update_firmware_omsdk(idrac_connection_firmware_redfish_mock, f_module)
+ assert ex.value.args[0] == "No changes found to commit!"
+ idrac_default_args.update({"apply_update": False})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = False
+ with pytest.raises(Exception) as ex:
+ self.module.update_firmware_omsdk(idrac_connection_firmware_redfish_mock, f_module)
+ assert ex.value.args[0] == "Unable to complete the repository update."
+
+ @pytest.mark.parametrize("exc_type", [RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError,
+ ImportError, ValueError, TypeError, IOError, AssertionError, OSError])
+ def test_main(self, idrac_default_args, idrac_connection_firmware_redfish_mock, mocker, exc_type):
+ idrac_default_args.update({"share_name": "sharename", "catalog_file_name": CATALOG,
+ "share_user": "sharename", "share_password": SHARE_PWD, "ignore_cert_warning": False,
+ "share_mnt": "sharmnt", "reboot": True, "job_wait": True, "apply_update": True})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ f_module.check_mode = True
+ idrac_connection_firmware_redfish_mock.status_code = 400
+ idrac_connection_firmware_redfish_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + VALIDATE_CATALOG,
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH + VALIDATE_CATALOG,
+ side_effect=exc_type(TEST_HOST, 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ if exc_type == HTTPError:
+ result = self._run_module(idrac_default_args)
+ assert result['failed'] is True
+ elif exc_type == URLError:
+ result = self._run_module(idrac_default_args)
+ assert result['unreachable'] is True
+ else:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ if exc_type == HTTPError:
+ assert 'error_info' in result
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware_info.py
index 787dba2c7..b821c9556 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware_info.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_firmware_info.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -15,7 +15,7 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_firmware_info
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from mock import MagicMock, PropertyMock
from pytest import importorskip
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -67,7 +67,7 @@ class TestFirmware(FakeAnsibleModule):
if exc_type not in [HTTPError, SSLValidationError]:
type(obj2).InstalledFirmware = PropertyMock(side_effect=exc_type('test'))
else:
- type(obj2).InstalledFirmware = PropertyMock(side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ type(obj2).InstalledFirmware = PropertyMock(side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
if not exc_type == URLError:
result = self._run_module_with_fail_json(idrac_default_args)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_license.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_license.py
new file mode 100644
index 000000000..a07cc1eb1
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_license.py
@@ -0,0 +1,746 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 8.7.0
+# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import absolute_import, division, print_function
+
+from io import StringIO
+import json
+import tempfile
+import os
+
+import pytest
+from urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_license
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock
+from ansible_collections.dellemc.openmanage.plugins.modules.idrac_license import main
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.idrac_license.'
+MODULE_UTILS_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.utils.'
+
+INVALID_LICENSE_MSG = "License with ID '{license_id}' does not exist on the iDRAC."
+SUCCESS_EXPORT_MSG = "Successfully exported the license."
+SUCCESS_DELETE_MSG = "Successfully deleted the license."
+SUCCESS_IMPORT_MSG = "Successfully imported the license."
+FAILURE_MSG = "Unable to '{operation}' the license with id '{license_id}' as it does not exist."
+FAILURE_IMPORT_MSG = "Unable to import the license."
+NO_FILE_MSG = "License file not found."
+UNSUPPORTED_FIRMWARE_MSG = "iDRAC firmware version is not supported."
+NO_OPERATION_SKIP_MSG = "Task is skipped as none of import, export or delete is specified."
+INVALID_FILE_MSG = "File extension is invalid. Supported extensions for local 'share_type' " \
+ "are: .txt and .xml, and for network 'share_type' is: .xml."
+INVALID_DIRECTORY_MSG = "Provided directory path '{path}' is not valid."
+INSUFFICIENT_DIRECTORY_PERMISSION_MSG = "Provided directory path '{path}' is not writable. " \
+ "Please check if the directory has appropriate permissions"
+MISSING_FILE_NAME_PARAMETER_MSG = "Missing required parameter 'file_name'."
+REDFISH = "/redfish/v1"
+
+LIC_GET_LICENSE_URL = "License.get_license_url"
+REDFISH_LICENSE_URL = "/redfish/v1/license"
+REDFISH_BASE_API = '/redfish/v1/api'
+MANAGER_URI_ONE = "/redfish/v1/managers/1"
+API_ONE = "/local/action"
+EXPORT_URL_MOCK = '/redfish/v1/export_license'
+IMPORT_URL_MOCK = '/redfish/v1/import_license'
+API_INVOKE_MOCKER = "iDRACRedfishAPI.invoke_request"
+ODATA = "@odata.id"
+IDRAC_ID = "iDRAC.Embedded.1"
+LIC_FILE_NAME = 'test_lic.txt'
+HTTPS_PATH = "https://testhost.com"
+HTTP_ERROR = "http error message"
+APPLICATION_JSON = "application/json"
+
+
+class TestLicense(FakeAnsibleModule):
+ module = idrac_license
+
+ @pytest.fixture
+ def idrac_license_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_license_mock(self, mocker, idrac_license_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
+ return_value=idrac_license_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_license_mock
+ return idrac_conn_mock
+
+ def test_check_license_id(self, idrac_default_args, idrac_connection_license_mock,
+ idrac_license_mock, mocker):
+ mocker.patch(MODULE_PATH + LIC_GET_LICENSE_URL,
+ return_value=REDFISH_LICENSE_URL)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ lic_obj = self.module.License(
+ idrac_connection_license_mock, f_module)
+
+ idr_obj = MagicMock()
+ idr_obj.json_data = {"license_id": "1234"}
+ mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
+ return_value=idr_obj)
+ data = lic_obj.check_license_id(license_id="1234")
+ assert data.json_data == {"license_id": "1234"}
+
+ mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
+ side_effect=HTTPError(HTTPS_PATH, 400,
+ HTTP_ERROR,
+ {"accept-type": APPLICATION_JSON},
+ StringIO("json_str")))
+ with pytest.raises(Exception) as exc:
+ lic_obj.check_license_id(license_id="1234")
+ assert exc.value.args[0] == INVALID_LICENSE_MSG.format(license_id="1234")
+
+ def test_get_license_url(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ v1_resp = {"LicenseService": {ODATA: "/redfish/v1/LicenseService"},
+ "Licenses": {ODATA: "/redfish/v1/LicenseService/Licenses"}}
+ mocker.patch(MODULE_PATH + "get_dynamic_uri",
+ return_value=v1_resp)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ lic_obj = self.module.License(
+ idrac_connection_license_mock, f_module)
+ data = lic_obj.get_license_url()
+ assert data == "/redfish/v1/LicenseService/Licenses"
+
+ def test_get_job_status_success(self, mocker, idrac_license_mock):
+ # Mocking necessary objects and functions
+ module_mock = self.get_module_mock()
+ license_job_response_mock = mocker.MagicMock()
+ license_job_response_mock.headers.get.return_value = HTTPS_PATH + "/job_tracking/12345"
+
+ mocker.patch(MODULE_PATH + "remove_key", return_value={"job_details": "mocked_job_details"})
+ mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri", return_value=[MANAGER_URI_ONE])
+
+ # Creating an instance of the class
+ obj_under_test = self.module.License(idrac_license_mock, module_mock)
+
+ # Mocking the idrac_redfish_job_tracking function to simulate a successful job tracking
+ mocker.patch(MODULE_PATH + "idrac_redfish_job_tracking", return_value=(False, "mocked_message", {"job_details": "mocked_job_details"}, 0))
+
+ # Calling the method under test
+ result = obj_under_test.get_job_status(license_job_response_mock)
+
+ # Assertions
+ assert result == {"job_details": "mocked_job_details"}
+
+ def test_get_job_status_failure(self, mocker, idrac_license_mock):
+ # Mocking necessary objects and functions
+ module_mock = self.get_module_mock()
+ license_job_response_mock = mocker.MagicMock()
+ license_job_response_mock.headers.get.return_value = HTTPS_PATH + "/job_tracking/12345"
+
+ mocker.patch(MODULE_PATH + "remove_key", return_value={"Message": "None"})
+ mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri", return_value=[MANAGER_URI_ONE])
+
+ # Creating an instance of the class
+ obj_under_test = self.module.License(idrac_license_mock, module_mock)
+
+ # Mocking the idrac_redfish_job_tracking function to simulate a failed job tracking
+ mocker.patch(MODULE_PATH + "idrac_redfish_job_tracking", return_value=(True, "None", {"Message": "None"}, 0))
+
+ # Mocking module.exit_json
+ exit_json_mock = mocker.patch.object(module_mock, "exit_json")
+
+ # Calling the method under test
+ result = obj_under_test.get_job_status(license_job_response_mock)
+
+ # Assertions
+ exit_json_mock.assert_called_once_with(msg="None", failed=True, job_details={"Message": "None"})
+ assert result == {"Message": "None"}
+
+ def test_get_share_details(self, idrac_connection_license_mock):
+ # Create a mock module object
+ module_mock = MagicMock()
+ module_mock.params.get.return_value = {
+ 'ip_address': 'XX.XX.XX.XX',
+ 'share_name': 'my_share',
+ 'username': 'my_user',
+ 'password': 'my_password'
+ }
+
+ # Create an instance of the License class
+ lic_obj = self.module.License(idrac_connection_license_mock, module_mock)
+
+ # Call the get_share_details method
+ result = lic_obj.get_share_details()
+
+ # Assert the result
+ assert result == {
+ 'IPAddress': 'XX.XX.XX.XX',
+ 'ShareName': 'my_share',
+ 'UserName': 'my_user',
+ 'Password': 'my_password'
+ }
+
+ def test_get_proxy_details(self, idrac_connection_license_mock):
+ # Create a mock module object
+ module_mock = MagicMock()
+ module_mock.params.get.return_value = {
+ 'ip_address': 'XX.XX.XX.XX',
+ 'share_name': 'my_share',
+ 'username': 'my_user',
+ 'password': 'my_password',
+ 'share_type': 'http',
+ 'ignore_certificate_warning': 'off',
+ 'proxy_support': 'parameters_proxy',
+ 'proxy_type': 'http',
+ 'proxy_server': 'proxy.example.com',
+ 'proxy_port': 8080,
+ 'proxy_username': 'my_username',
+ 'proxy_password': 'my_password'
+ }
+
+ # Create an instance of the License class
+ lic_obj = self.module.License(idrac_connection_license_mock, module_mock)
+
+ # Call the get_proxy_details method
+ result = lic_obj.get_proxy_details()
+
+ # Define the expected result
+ expected_result = {
+ 'IPAddress': 'XX.XX.XX.XX',
+ 'ShareName': 'my_share',
+ 'UserName': 'my_user',
+ 'Password': 'my_password',
+ 'ShareType': 'HTTP',
+ 'IgnoreCertWarning': 'Off',
+ 'ProxySupport': 'ParametersProxy',
+ 'ProxyType': 'HTTP',
+ 'ProxyServer': 'proxy.example.com',
+ 'ProxyPort': '8080',
+ 'ProxyUname': 'my_username',
+ 'ProxyPasswd': 'my_password'
+ }
+
+ # Assert the result
+ assert result == expected_result
+
+
+class TestDeleteLicense:
+ @pytest.fixture
+ def delete_license_mock(self):
+ delete_license_obj = MagicMock()
+ return delete_license_obj
+
+ @pytest.fixture
+ def idrac_connection_license_mock(self, mocker, delete_license_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
+ return_value=delete_license_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = delete_license_mock
+ return idrac_conn_mock
+
+ def test_execute_delete_license_success(self, mocker, idrac_connection_license_mock):
+ mocker.patch(MODULE_PATH + LIC_GET_LICENSE_URL,
+ return_value=REDFISH_LICENSE_URL)
+ f_module = MagicMock()
+ f_module.params = {'license_id': '1234'}
+ delete_license_obj = idrac_license.DeleteLicense(idrac_connection_license_mock, f_module)
+ delete_license_obj.idrac.invoke_request.return_value.status_code = 204
+ delete_license_obj.execute()
+ f_module.exit_json.assert_called_once_with(msg=SUCCESS_DELETE_MSG, changed=True)
+
+ def test_execute_delete_license_failure(self, mocker, idrac_connection_license_mock):
+ mocker.patch(MODULE_PATH + LIC_GET_LICENSE_URL,
+ return_value=REDFISH_LICENSE_URL)
+ f_module = MagicMock()
+ f_module.params = {'license_id': '5678'}
+ delete_license_obj = idrac_license.DeleteLicense(idrac_connection_license_mock, f_module)
+ delete_license_obj.idrac.invoke_request.return_value.status_code = 404
+ delete_license_obj.execute()
+ f_module.exit_json.assert_called_once_with(msg=FAILURE_MSG.format(operation="delete", license_id="5678"), failed=True)
+
+
+class TestExportLicense(FakeAnsibleModule):
+ module = idrac_license
+
+ @pytest.fixture
+ def idrac_license_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_license_mock(self, mocker, idrac_license_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
+ return_value=idrac_license_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_license_mock
+ return idrac_conn_mock
+
+ def test_export_license_local(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ tmp_path = tempfile.gettempdir()
+ export_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'share_name': str(tmp_path),
+ 'file_name': 'test_lic'
+ }
+ }
+ idr_obj = MagicMock()
+ idr_obj.json_data = {"license_id": "1234", "LicenseFile": "test_license_content"}
+ mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
+ return_value=idr_obj)
+ idrac_default_args.update(export_params)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ export_license_obj = self.module.ExportLicense(idrac_connection_license_mock, f_module)
+ result = export_license_obj._ExportLicense__export_license_local(EXPORT_URL_MOCK)
+ assert result.json_data == {'LicenseFile': 'test_license_content', 'license_id': '1234'}
+ assert os.path.exists(f"{tmp_path}/test_lic_iDRAC_license.txt")
+ if os.path.exists(f"{tmp_path}/test_lic_iDRAC_license.txt"):
+ os.remove(f"{tmp_path}/test_lic_iDRAC_license.txt")
+
+ export_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'share_name': str(tmp_path),
+ }
+ }
+ idrac_default_args.update(export_params)
+ result = export_license_obj._ExportLicense__export_license_local(EXPORT_URL_MOCK)
+ assert result.json_data == {'LicenseFile': 'test_license_content', 'license_id': '1234'}
+ assert os.path.exists(f"{tmp_path}/test_license_id_iDRAC_license.txt")
+ if os.path.exists(f"{tmp_path}/test_license_id_iDRAC_license.txt"):
+ os.remove(f"{tmp_path}/test_license_id_iDRAC_license.txt")
+
+ def test_export_license_http(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ export_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'file_name': 'test_lic',
+ 'share_type': 'http',
+ 'ignore_certificate_warning': 'off'
+ }
+ }
+ idr_obj = MagicMock()
+ idr_obj.json_data = {"license_id": "1234", "LicenseFile": "test_license_content"}
+ mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
+ return_value=idr_obj)
+ idrac_default_args.update(export_params)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ export_license_obj = self.module.ExportLicense(idrac_connection_license_mock, f_module)
+ result = export_license_obj._ExportLicense__export_license_http(EXPORT_URL_MOCK)
+ assert result.json_data == {'LicenseFile': 'test_license_content', 'license_id': '1234'}
+
+ def test_export_license_cifs(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ export_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'file_name': 'test_lic',
+ 'share_type': 'cifs',
+ 'ignore_certificate_warning': 'off',
+ 'workgroup': "mydomain"
+ }
+ }
+ idr_obj = MagicMock()
+ idr_obj.json_data = {"license_id": "1234", "LicenseFile": "test_license_content"}
+ mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
+ return_value=idr_obj)
+ idrac_default_args.update(export_params)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ export_license_obj = self.module.ExportLicense(idrac_connection_license_mock, f_module)
+ result = export_license_obj._ExportLicense__export_license_cifs(EXPORT_URL_MOCK)
+ assert result.json_data == {'LicenseFile': 'test_license_content', 'license_id': '1234'}
+
+ def test_export_license_nfs(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ export_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'file_name': 'test_lic',
+ 'share_type': 'nfs',
+ 'ignore_certificate_warning': 'off'
+ }
+ }
+ idr_obj = MagicMock()
+ idr_obj.json_data = {"license_id": "1234", "LicenseFile": "test_license_content"}
+ mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
+ return_value=idr_obj)
+ idrac_default_args.update(export_params)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ export_license_obj = self.module.ExportLicense(idrac_connection_license_mock, f_module)
+ result = export_license_obj._ExportLicense__export_license_nfs(EXPORT_URL_MOCK)
+ assert result.json_data == {'LicenseFile': 'test_license_content', 'license_id': '1234'}
+
+ def test_get_export_license_url(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ export_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'file_name': 'test_lic',
+ 'share_type': 'local',
+ 'ignore_certificate_warning': 'off'
+ }
+ }
+ mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
+ return_value=(REDFISH, None))
+ mocker.patch(MODULE_PATH + "get_dynamic_uri",
+ return_value={"Links": {"Oem": {"Dell": {"DellLicenseManagementService": {ODATA: "/LicenseService"}}}},
+ "Actions": {"#DellLicenseManagementService.ExportLicense": {"target": API_ONE}}})
+ idrac_default_args.update(export_params)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ export_license_obj = self.module.ExportLicense(idrac_connection_license_mock, f_module)
+ result = export_license_obj._ExportLicense__get_export_license_url()
+ assert result == API_ONE
+
+ mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
+ return_value=(REDFISH, "error"))
+ with pytest.raises(Exception) as exc:
+ export_license_obj._ExportLicense__get_export_license_url()
+ assert exc.value.args[0] == "error"
+
+ def test_execute(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ share_type = 'local'
+ export_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'file_name': 'test_lic',
+ 'share_type': share_type
+ }
+ }
+ idrac_default_args.update(export_params)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ mocker.patch(MODULE_PATH + "License.check_license_id")
+ mocker.patch(MODULE_PATH + "ExportLicense._ExportLicense__get_export_license_url",
+ return_value="/License/url")
+ mocker.patch(MODULE_PATH + "ExportLicense.get_job_status",
+ return_value={"JobId": "JID1234"})
+ idr_obj = MagicMock()
+ idr_obj.status_code = 200
+
+ mocker.patch(MODULE_PATH + "ExportLicense._ExportLicense__export_license_local",
+ return_value=idr_obj)
+ export_license_obj = self.module.ExportLicense(idrac_connection_license_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ export_license_obj.execute()
+ assert exc.value.args[0] == SUCCESS_EXPORT_MSG
+
+ export_params.get('share_parameters')["share_type"] = "http"
+ mocker.patch(MODULE_PATH + "ExportLicense._ExportLicense__export_license_http",
+ return_value=idr_obj)
+ with pytest.raises(Exception) as exc:
+ export_license_obj.execute()
+ assert exc.value.args[0] == SUCCESS_EXPORT_MSG
+
+ export_params.get('share_parameters')["share_type"] = "cifs"
+ mocker.patch(MODULE_PATH + "ExportLicense._ExportLicense__export_license_cifs",
+ return_value=idr_obj)
+ with pytest.raises(Exception) as exc:
+ export_license_obj.execute()
+ assert exc.value.args[0] == SUCCESS_EXPORT_MSG
+
+ export_params.get('share_parameters')["share_type"] = "nfs"
+ mocker.patch(MODULE_PATH + "ExportLicense._ExportLicense__export_license_nfs",
+ return_value=idr_obj)
+ with pytest.raises(Exception) as exc:
+ export_license_obj.execute()
+ assert exc.value.args[0] == SUCCESS_EXPORT_MSG
+
+ export_params.get('share_parameters')["share_type"] = "https"
+ idr_obj.status_code = 400
+ mocker.patch(MODULE_PATH + "ExportLicense._ExportLicense__export_license_http",
+ return_value=idr_obj)
+ with pytest.raises(Exception) as exc:
+ export_license_obj.execute()
+ assert exc.value.args[0] == FAILURE_MSG.format(operation="export", license_id="test_license_id")
+
+
+class TestImportLicense(FakeAnsibleModule):
+ module = idrac_license
+
+ @pytest.fixture
+ def idrac_license_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_license_mock(self, mocker, idrac_license_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
+ return_value=idrac_license_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_license_mock
+ return idrac_conn_mock
+
+ def test_execute(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ share_type = 'local'
+ import_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'file_name': 'test_lic.xml',
+ 'share_type': share_type
+ }
+ }
+ idrac_default_args.update(import_params)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ mocker.patch(MODULE_PATH + "ImportLicense._ImportLicense__get_import_license_url",
+ return_value="/License/url")
+ mocker.patch(MODULE_PATH + "get_manager_res_id",
+ return_value=IDRAC_ID)
+ mocker.patch(MODULE_PATH + "ImportLicense.get_job_status",
+ return_value={"JobId": "JID1234"})
+ idr_obj = MagicMock()
+ idr_obj.status_code = 200
+
+ mocker.patch(MODULE_PATH + "ImportLicense._ImportLicense__import_license_local",
+ return_value=idr_obj)
+ import_license_obj = self.module.ImportLicense(idrac_connection_license_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ import_license_obj.execute()
+ assert exc.value.args[0] == SUCCESS_IMPORT_MSG
+
+ import_params.get('share_parameters')["share_type"] = "http"
+ mocker.patch(MODULE_PATH + "ImportLicense._ImportLicense__import_license_http",
+ return_value=idr_obj)
+ with pytest.raises(Exception) as exc:
+ import_license_obj.execute()
+ assert exc.value.args[0] == SUCCESS_IMPORT_MSG
+
+ import_params.get('share_parameters')["share_type"] = "cifs"
+ mocker.patch(MODULE_PATH + "ImportLicense._ImportLicense__import_license_cifs",
+ return_value=idr_obj)
+ with pytest.raises(Exception) as exc:
+ import_license_obj.execute()
+ assert exc.value.args[0] == SUCCESS_IMPORT_MSG
+
+ import_params.get('share_parameters')["share_type"] = "nfs"
+ mocker.patch(MODULE_PATH + "ImportLicense._ImportLicense__import_license_nfs",
+ return_value=idr_obj)
+ with pytest.raises(Exception) as exc:
+ import_license_obj.execute()
+ assert exc.value.args[0] == SUCCESS_IMPORT_MSG
+
+ import_params.get('share_parameters')["share_type"] = "https"
+ idr_obj.status_code = 400
+ mocker.patch(MODULE_PATH + "ImportLicense._ImportLicense__import_license_http",
+ return_value=idr_obj)
+ with pytest.raises(Exception) as exc:
+ import_license_obj.execute()
+ assert exc.value.args[0] == FAILURE_IMPORT_MSG
+
+ def test_import_license_local(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ tmp_path = tempfile.gettempdir()
+ import_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'share_name': 'doesnotexistpath',
+ 'file_name': LIC_FILE_NAME
+ }
+ }
+ idrac_default_args.update(import_params)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ import_license_obj = self.module.ImportLicense(idrac_connection_license_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ import_license_obj._ImportLicense__import_license_local(EXPORT_URL_MOCK, IDRAC_ID)
+ assert exc.value.args[0] == INVALID_DIRECTORY_MSG.format(path='doesnotexistpath')
+
+ import_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'share_name': str(tmp_path),
+ 'file_name': LIC_FILE_NAME
+ }
+ }
+ file_name = os.path.join(tmp_path, LIC_FILE_NAME)
+ with open(file_name, "w") as fp:
+ fp.writelines("license_file")
+ idr_obj = MagicMock()
+ idr_obj.json_data = {"license_id": "1234", "LicenseFile": "test_license_content"}
+ mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
+ return_value=idr_obj)
+ idrac_default_args.update(import_params)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ import_license_obj = self.module.ImportLicense(idrac_connection_license_mock, f_module)
+ result = import_license_obj._ImportLicense__import_license_local(EXPORT_URL_MOCK, IDRAC_ID)
+ assert result.json_data == {'LicenseFile': 'test_license_content', 'license_id': '1234'}
+ assert os.path.exists(file_name)
+
+ json_str = to_text(json.dumps({"error": {'@Message.ExtendedInfo': [
+ {
+ 'MessageId': "LIC018",
+ "Message": "Already imported"
+ }
+ ]}}))
+ mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
+ side_effect=HTTPError(HTTPS_PATH, 400, HTTP_ERROR,
+ {"accept-type": APPLICATION_JSON}, StringIO(json_str)))
+ with pytest.raises(Exception) as exc:
+ import_license_obj._ImportLicense__import_license_local(EXPORT_URL_MOCK, IDRAC_ID)
+ assert exc.value.args[0] == "Already imported"
+
+ if os.path.exists(file_name):
+ os.remove(file_name)
+
+ def test_import_license_http(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ import_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'file_name': 'test_lic',
+ 'share_type': 'http',
+ 'ignore_certificate_warning': 'off'
+ }
+ }
+ idr_obj = MagicMock()
+ idr_obj.json_data = {"license_id": "1234", "LicenseFile": "test_license_content"}
+ mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
+ return_value=idr_obj)
+ idrac_default_args.update(import_params)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ import_license_obj = self.module.ImportLicense(idrac_connection_license_mock, f_module)
+ result = import_license_obj._ImportLicense__import_license_http(IMPORT_URL_MOCK, IDRAC_ID)
+ assert result.json_data == {'LicenseFile': 'test_license_content', 'license_id': '1234'}
+
+ def test_import_license_cifs(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ import_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'file_name': 'test_lic',
+ 'share_type': 'cifs',
+ 'ignore_certificate_warning': 'off',
+ 'workgroup': 'mydomain'
+ }
+ }
+ idr_obj = MagicMock()
+ idr_obj.json_data = {"license_id": "1234", "LicenseFile": "test_license_content"}
+ mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
+ return_value=idr_obj)
+ idrac_default_args.update(import_params)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ import_license_obj = self.module.ImportLicense(idrac_connection_license_mock, f_module)
+ result = import_license_obj._ImportLicense__import_license_cifs(IMPORT_URL_MOCK, IDRAC_ID)
+ assert result.json_data == {'LicenseFile': 'test_license_content', 'license_id': '1234'}
+
+ def test_import_license_nfs(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ import_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'file_name': 'test_lic',
+ 'share_type': 'nfs',
+ 'ignore_certificate_warning': 'off',
+ 'workgroup': 'mydomain'
+ }
+ }
+ idr_obj = MagicMock()
+ idr_obj.json_data = {"license_id": "1234", "LicenseFile": "test_license_content"}
+ mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
+ return_value=idr_obj)
+ idrac_default_args.update(import_params)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ import_license_obj = self.module.ImportLicense(idrac_connection_license_mock, f_module)
+ result = import_license_obj._ImportLicense__import_license_nfs(IMPORT_URL_MOCK, IDRAC_ID)
+ assert result.json_data == {'LicenseFile': 'test_license_content', 'license_id': '1234'}
+
+ def test_get_import_license_url(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ export_params = {
+ 'license_id': 'test_license_id',
+ 'share_parameters': {
+ 'file_name': 'test_lic',
+ 'share_type': 'local',
+ 'ignore_certificate_warning': 'off'
+ }
+ }
+ mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
+ return_value=(REDFISH, None))
+ mocker.patch(MODULE_PATH + "get_dynamic_uri",
+ return_value={"Links": {"Oem": {"Dell": {"DellLicenseManagementService": {ODATA: "/LicenseService"}}}},
+ "Actions": {"#DellLicenseManagementService.ImportLicense": {"target": API_ONE}}})
+ idrac_default_args.update(export_params)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ import_license_obj = self.module.ImportLicense(idrac_connection_license_mock, f_module)
+ result = import_license_obj._ImportLicense__get_import_license_url()
+ assert result == API_ONE
+
+ def test_get_job_status(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri", return_value=[MANAGER_URI_ONE])
+ lic_job_resp_obj = MagicMock()
+ lic_job_resp_obj.headers = {"Location": "idrac_internal"}
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ import_license_obj = self.module.ImportLicense(idrac_connection_license_mock, f_module)
+
+ mocker.patch(MODULE_PATH + "idrac_redfish_job_tracking", return_value=(False, "None", {"JobId": "JID1234"}, 0))
+ result = import_license_obj.get_job_status(lic_job_resp_obj)
+ assert result == {"JobId": "JID1234"}
+
+ mocker.patch(MODULE_PATH + "idrac_redfish_job_tracking", return_value=(True, "None", {"Message": "Got LIC018",
+ "MessageId": "LIC018"}, 0))
+ with pytest.raises(Exception) as exc:
+ import_license_obj.get_job_status(lic_job_resp_obj)
+ assert exc.value.args[0] == "Got LIC018"
+
+ mocker.patch(MODULE_PATH + "idrac_redfish_job_tracking", return_value=(True, "None", {"Message": "Got LIC019",
+ "MessageId": "LIC019"}, 0))
+ with pytest.raises(Exception) as exc:
+ import_license_obj.get_job_status(lic_job_resp_obj)
+ assert exc.value.args[0] == "Got LIC019"
+
+
+class TestLicenseType(FakeAnsibleModule):
+ module = idrac_license
+
+ @pytest.fixture
+ def idrac_license_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_license_mock(self, mocker, idrac_license_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
+ return_value=idrac_license_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_license_mock
+ return idrac_conn_mock
+
+ def test_license_operation(self, idrac_default_args, idrac_connection_license_mock, mocker):
+ idrac_default_args.update({"import": False, "export": False, "delete": True})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ lic_class = self.module.LicenseType.license_operation(idrac_connection_license_mock, f_module)
+ assert isinstance(lic_class, self.module.DeleteLicense)
+
+ idrac_default_args.update({"import": False, "export": True, "delete": False})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ lic_class = self.module.LicenseType.license_operation(idrac_connection_license_mock, f_module)
+ assert isinstance(lic_class, self.module.ExportLicense)
+
+ idrac_default_args.update({"import": True, "export": False, "delete": False})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ lic_class = self.module.LicenseType.license_operation(idrac_connection_license_mock, f_module)
+ assert isinstance(lic_class, self.module.ImportLicense)
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_idrac_license_main_exception_handling_case(self, exc_type, mocker, idrac_default_args, idrac_connection_license_mock):
+ idrac_default_args.update({"delete": True, "license_id": "1234"})
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version",
+ side_effect=exc_type(HTTPS_PATH, 400,
+ HTTP_ERROR,
+ {"accept-type": APPLICATION_JSON},
+ StringIO(json_str)))
+ else:
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version",
+ side_effect=exc_type('test'))
+ result = self._run_module(idrac_default_args)
+ if exc_type == URLError:
+ assert result['unreachable'] is True
+ else:
+ assert result['failed'] is True
+ assert 'msg' in result
+
+ def test_main(self, mocker):
+ module_mock = mocker.MagicMock()
+ idrac_mock = mocker.MagicMock()
+ license_mock = mocker.MagicMock()
+
+ # Mock the necessary functions and objects
+ mocker.patch(MODULE_PATH + 'get_argument_spec', return_value={})
+ mocker.patch(MODULE_PATH + 'idrac_auth_params', {})
+ mocker.patch(MODULE_PATH + 'AnsibleModule', return_value=module_mock)
+ mocker.patch(MODULE_PATH + 'iDRACRedfishAPI', return_value=idrac_mock)
+ mocker.patch(MODULE_PATH + 'get_idrac_firmware_version', return_value='3.1')
+ mocker.patch(MODULE_PATH + 'LicenseType.license_operation', return_value=license_mock)
+ main()
+ mocker.patch(MODULE_PATH + 'get_idrac_firmware_version', return_value='2.9')
+ main()
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py
index 39df4e4c6..b5690b037 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -14,7 +14,7 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_lifecycle_controller_job_status_info
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from mock import MagicMock, PropertyMock
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -69,7 +69,7 @@ class TestLcJobStatus(FakeAnsibleModule):
result = self._run_module_with_fail_json(idrac_default_args)
assert result['failed'] is True
else:
- idrac_get_lc_job_status_connection_mock.job_mgr.get_job_status.side_effect = exc_type('http://testhost.com', 400,
+ idrac_get_lc_job_status_connection_mock.job_mgr.get_job_status.side_effect = exc_type('https://testhost.com', 400,
'http error message',
{"accept-type": "application/json"},
StringIO(json_str))
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py
index 491932673..e4920f199 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -15,9 +15,9 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_lifecycle_controller_jobs
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
-from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils.urls import SSLValidationError
from mock import MagicMock, PropertyMock
from io import StringIO
from ansible.module_utils._text import to_text
@@ -76,10 +76,10 @@ class TestDeleteLcJob(FakeAnsibleModule):
idrac_connection_delete_lc_job_queue_mock.job_mgr.delete_job.side_effect = exc_type('test')
else:
idrac_connection_delete_lc_job_queue_mock.job_mgr.delete_all_jobs.side_effect = \
- exc_type('http://testhost.com', 400, 'http error message', {"accept-type": "application/json"},
+ exc_type('https://testhost.com', 400, 'http error message', {"accept-type": "application/json"},
StringIO(json_str))
idrac_connection_delete_lc_job_queue_mock.job_mgr.delete_job.side_effect = \
- exc_type('http://testhost.com', 400, 'http error message', {"accept-type": "application/json"},
+ exc_type('https://testhost.com', 400, 'http error message', {"accept-type": "application/json"},
StringIO(json_str))
if not exc_type == URLError:
result = self._run_module_with_fail_json(idrac_default_args)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py
index c1a0894e2..2802c3ed5 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2020-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -14,8 +14,8 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_lifecycle_controller_logs
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from io import StringIO
@@ -67,23 +67,16 @@ class TestExportLcLogs(FakeAnsibleModule):
result = self._run_module(idrac_default_args)
assert result["msg"] == "Successfully exported the lifecycle controller logs."
- def test_run_export_lc_logs_success_case01(self, idrac_connection_export_lc_logs_mock, idrac_default_args,
- idrac_file_manager_export_lc_logs_mock):
- idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
- "share_password": "sharepassword", "job_wait": True})
- idrac_connection_export_lc_logs_mock.log_mgr.lclog_export.return_value = {"Status": "Success"}
- f_module = self.get_module_mock(params=idrac_default_args)
- msg = self.module.run_export_lc_logs(idrac_connection_export_lc_logs_mock, f_module)
- assert msg == {'Status': 'Success'}
-
- def test_run_export_lc_logs_status_fail_case01(self, idrac_connection_export_lc_logs_mock, idrac_default_args,
- idrac_file_manager_export_lc_logs_mock):
- idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
- "share_password": "sharepassword", "job_wait": True})
- idrac_connection_export_lc_logs_mock.log_mgr.lclog_export.return_value = {"Status": "failed"}
- f_module = self.get_module_mock(params=idrac_default_args)
- msg = self.module.run_export_lc_logs(idrac_connection_export_lc_logs_mock, f_module)
- assert msg == {'Status': 'failed'}
+ idrac_default_args.update({"job_wait": False})
+ mocker.patch(MODULE_PATH + 'idrac_lifecycle_controller_logs.run_export_lc_logs', return_value=message)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "The export lifecycle controller log job is submitted successfully."
+
+ message = {"Status": "Failed", "JobStatus": "Failed"}
+ mocker.patch(MODULE_PATH + 'idrac_lifecycle_controller_logs.run_export_lc_logs', return_value=message)
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result["msg"] == "Unable to export the lifecycle controller logs."
+ assert result["failed"] is True
@pytest.mark.parametrize("exc_type", [RuntimeError, SSLValidationError, ConnectionError, KeyError,
ImportError, ValueError, TypeError, HTTPError, URLError])
@@ -98,11 +91,61 @@ class TestExportLcLogs(FakeAnsibleModule):
side_effect=exc_type('test'))
else:
mocker.patch(MODULE_PATH + 'idrac_lifecycle_controller_logs.run_export_lc_logs',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
- if not exc_type == URLError:
+ if exc_type != URLError:
result = self._run_module_with_fail_json(idrac_default_args)
assert result['failed'] is True
else:
result = self._run_module(idrac_default_args)
assert 'msg' in result
+
+ @pytest.mark.parametrize("args_update", [{"share_user": "share@user"}, {"share_user": "shareuser"}, {"share_user": "share\\user"}])
+ def test_get_user_credentials(self, args_update, idrac_connection_export_lc_logs_mock, idrac_default_args, idrac_file_manager_export_lc_logs_mock, mocker):
+ idrac_default_args.update({"share_name": "sharename",
+ "share_password": "sharepassword", "job_wait": True})
+ obj = MagicMock()
+ obj.IsValid = True
+ mocker.patch(
+ MODULE_PATH + "idrac_lifecycle_controller_logs.file_share_manager.create_share_obj", return_value=(obj))
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idrac_default_args.update(args_update)
+ share = self.module.get_user_credentials(f_module)
+ assert share.IsValid is True
+
+ def test_run_export_lc_logs(self, idrac_connection_export_lc_logs_mock, idrac_default_args, idrac_file_manager_export_lc_logs_mock, mocker):
+ idrac_default_args.update({"idrac_port": 443, "share_name": "sharename", "share_user": "share@user",
+ "share_password": "sharepassword", "job_wait": True})
+ obj = MagicMock()
+ obj._name_ = "AF_INET6"
+ my_share = MagicMock()
+ my_share.new_file.return_value = "idrac_ip_file"
+ mocker.patch(
+ MODULE_PATH + "idrac_lifecycle_controller_logs.file_share_manager.create_share_obj", return_value=(my_share))
+ mocker.patch(
+ MODULE_PATH + "idrac_lifecycle_controller_logs.get_user_credentials", return_value=(my_share))
+ mocker.patch(
+ MODULE_PATH + "idrac_lifecycle_controller_logs.socket.getaddrinfo", return_value=([[obj]]))
+ mocker.patch(
+ MODULE_PATH + "idrac_lifecycle_controller_logs.copy.deepcopy", return_value=("idrac_ip"))
+ idrac_connection_export_lc_logs_mock.log_mgr.lclog_export.return_value = {
+ "Status": "Success"}
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ msg = self.module.run_export_lc_logs(
+ idrac_connection_export_lc_logs_mock, f_module)
+ assert msg['Status'] == "Success"
+
+ idrac_default_args.update({"idrac_port": 443, "share_name": "sharename", "share_user": "shareuser",
+ "share_password": "sharepassword", "job_wait": True})
+ obj._name_ = "AF_INET"
+ mocker.patch(
+ MODULE_PATH + "idrac_lifecycle_controller_logs.socket.getaddrinfo", return_value=([[obj]]))
+ msg = self.module.run_export_lc_logs(
+ idrac_connection_export_lc_logs_mock, f_module)
+ assert msg['Status'] == "Success"
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py
index d00e2bc06..431dc4b8e 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -14,8 +14,8 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_lifecycle_controller_status_info
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock, Mock
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from mock import PropertyMock
@@ -74,7 +74,7 @@ class TestLcStatus(FakeAnsibleModule):
assert result['failed'] is True
assert 'msg' in result
else:
- type(obj2).LCReady = PropertyMock(side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ type(obj2).LCReady = PropertyMock(side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"},
StringIO(json_str)))
result = self._run_module_with_fail_json(idrac_default_args)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network.py
index 10f7183f6..4037c8d05 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 6.0.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2018-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -15,8 +15,8 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_network
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock, Mock
from io import StringIO
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
@@ -90,7 +90,7 @@ class TestConfigNetwork(FakeAnsibleModule):
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
- "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "enable_dhcp": "Enabled", "ip_address": "XX.XX.XX.XX", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
@@ -109,7 +109,7 @@ class TestConfigNetwork(FakeAnsibleModule):
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
- "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "enable_dhcp": "Enabled", "ip_address": "XX.XX.XX.XX", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
@@ -133,7 +133,7 @@ class TestConfigNetwork(FakeAnsibleModule):
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
- "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "enable_dhcp": "Enabled", "ip_address": "XX.XX.XX.XX", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
@@ -157,7 +157,7 @@ class TestConfigNetwork(FakeAnsibleModule):
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
- "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "enable_dhcp": "Enabled", "ip_address": "XX.XX.XX.XX", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
@@ -209,7 +209,7 @@ class TestConfigNetwork(FakeAnsibleModule):
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
- "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "enable_dhcp": "Enabled", "ip_address": "XX.XX.XX.XX", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
@@ -229,7 +229,7 @@ class TestConfigNetwork(FakeAnsibleModule):
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
- "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "enable_dhcp": "Enabled", "ip_address": "XX.XX.XX.XX", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
@@ -251,7 +251,7 @@ class TestConfigNetwork(FakeAnsibleModule):
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
- "enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
+ "enable_dhcp": "Enabled", "ip_address": "XX.XX.XX.XX", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
@@ -276,7 +276,7 @@ class TestConfigNetwork(FakeAnsibleModule):
else:
mocker.patch(
MODULE_PATH + 'idrac_network.run_idrac_network_config',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
if not exc_type == URLError:
result = self._run_module_with_fail_json(idrac_default_args)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network_attributes.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network_attributes.py
new file mode 100644
index 000000000..e9a6eada2
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network_attributes.py
@@ -0,0 +1,1011 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 8.4.0
+# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+
+import pytest
+from ansible.module_utils._text import to_text
+from urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import \
+ idrac_network_attributes
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import \
+ FakeAnsibleModule
+from mock import MagicMock
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+SUCCESS_MSG = "Successfully updated the network attributes."
+SUCCESS_CLEAR_PENDING_ATTR_MSG = "Successfully cleared the pending network attributes."
+SCHEDULE_MSG = "Successfully scheduled the job for network attributes update."
+TIMEOUT_NEGATIVE_OR_ZERO_MSG = "The value for the `job_wait_timeout` parameter cannot be negative or zero."
+MAINTENACE_OFFSET_DIFF_MSG = "The maintenance time must be post-fixed with local offset to {0}."
+MAINTENACE_OFFSET_BEHIND_MSG = "The specified maintenance time window occurs in the past, provide a future time to schedule the maintenance window."
+APPLY_TIME_NOT_SUPPORTED_MSG = "Apply time {0} is not supported."
+INVALID_ATTR_MSG = "Unable to update the network attributes because invalid values are entered. " + \
+ "Enter the valid values for the network attributes and retry the operation."
+VALID_AND_INVALID_ATTR_MSG = "Successfully updated the network attributes for valid values. " + \
+ "Unable to update other attributes because invalid values are entered. Enter the valid values and retry the operation."
+NO_CHANGES_FOUND_MSG = "No changes found to be applied."
+CHANGES_FOUND_MSG = "Changes found to be applied."
+INVALID_ID_MSG = "Unable to complete the operation because the value `{0}` for the input `{1}` parameter is invalid."
+JOB_RUNNING_CLEAR_PENDING_ATTR = "{0} Config job is running. Wait for the job to complete. Currently can not clear pending attributes."
+ATTRIBUTE_NOT_EXIST_CHECK_IDEMPOTENCY_MODE = 'Attribute is not valid.'
+CLEAR_PENDING_NOT_SUPPORTED_WITHOUT_ATTR_IDRAC8 = "Clear pending is not supported."
+WAIT_TIMEOUT_MSG = "The job is not complete after {0} seconds."
+
+
+class TestIDRACNetworkAttributes(FakeAnsibleModule):
+ module = idrac_network_attributes
+ uri = '/redfish/v1/api'
+ links = {
+ "Oem": {
+ "Dell": {
+ "DellNetworkAttributes": {
+ "@odata.id": "/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/NIC.Mezzanine.1A/NetworkDeviceFunctions/NIC.Mezzanine.1A-1-1/Oem/" +
+ "Dell/DellNetworkAttributes/NIC.Mezzanine.1A-1-1"
+ }
+ }
+ }
+ }
+ redfish_settings = {"@Redfish.Settings": {
+ "SettingsObject": {
+ "@odata.id": "/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/NIC.Mezzanine.1A/NetworkDeviceFunctions/NIC.Mezzanine.1A-1-1/Oem/Dell/" +
+ "DellNetworkAttributes/NIC.Mezzanine.1A-1-1/Settings"
+ }
+ }
+ }
+
+ @pytest.fixture
+ def idrac_ntwrk_attr_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_ntwrk_attr_mock(self, mocker, idrac_ntwrk_attr_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'idrac_network_attributes.iDRACRedfishAPI',
+ return_value=idrac_ntwrk_attr_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_ntwrk_attr_mock
+ return idrac_conn_mock
+
+ def test_get_registry_fw_less_than_6_more_than_3(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ registry_list = [
+ {
+ "@odata.id": "/redfish/v1/Registries/BaseMessages"
+ },
+ {
+ "@odata.id": "/redfish/v1/Registries/NetworkAttributesRegistry_NIC.Mezzanine.1A-1-1"
+ }]
+ location = [{'Uri': self.uri}]
+ registry_response = {'Attributes': [{
+ "AttributeName": "DeviceName",
+ "CurrentValue": None
+ },
+ {"AttributeName": "ChipMdl",
+ "CurrentValue": None
+ }
+ ]}
+ # Scenario 1: Got the registry Members list, Got Location, Got Attributes
+
+ def mock_get_dynamic_uri_request(*args, **kwargs):
+ if args[2] == 'Members':
+ return registry_list
+ elif args[2] == 'Location':
+ return location
+ else:
+ return registry_response
+
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_dynamic_uri",
+ side_effect=mock_get_dynamic_uri_request)
+ idrac_default_args.update({'network_adapter_id': 'NIC.Mezzanine.1A',
+ 'network_device_function_id': 'NIC.Mezzanine.1A-1-1',
+ 'apply_time': 'Immediate'})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
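+        # The private helper is reached through its name-mangled attribute.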
+ data = idr_obj._IDRACNetworkAttributes__get_registry_fw_less_than_6_more_than_3()
+ assert data == {'ChipMdl': None, 'DeviceName': None}
+
+        # Scenario 2: Got the registry Members as empty
+ def mock_get_dynamic_uri_request(*args, **kwargs):
+ if args[2] == 'Members':
+ return {}
+ elif args[2] == 'Location':
+ return location
+ else:
+ return registry_response
+
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_dynamic_uri",
+ side_effect=mock_get_dynamic_uri_request)
+ idrac_default_args.update({'network_adapter_id': 'NIC.Mezzanine.1A',
+ 'network_device_function_id': 'NIC.Mezzanine.1A-1-1',
+ 'apply_time': 'Immediate'})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj._IDRACNetworkAttributes__get_registry_fw_less_than_6_more_than_3()
+ assert data == {}
+
+        # Scenario 3: Got the registry Members but without a Location
+ def mock_get_dynamic_uri_request(*args, **kwargs):
+ if args[2] == 'Members':
+ return registry_list
+ elif args[2] == 'Location':
+ return {}
+ else:
+ return registry_response
+
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_dynamic_uri",
+ side_effect=mock_get_dynamic_uri_request)
+ idrac_default_args.update({'network_adapter_id': 'NIC.Mezzanine.1A',
+ 'network_device_function_id': 'NIC.Mezzanine.1A-1-1',
+ 'apply_time': 'Immediate'})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj._IDRACNetworkAttributes__get_registry_fw_less_than_6_more_than_3()
+ assert data == {}
+
+ def test_validate_time(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ resp = ("2022-09-14T05:59:35-05:00", "-05:00")
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_current_time",
+ return_value=resp)
+ idrac_default_args.update({'network_adapter_id': 'NIC.Mezzanine.1A',
+ 'network_device_function_id': 'NIC.Mezzanine.1A-1-1',
+ 'apply_time': 'Immediate'})
+        # Scenario 1: When mtime does not end with the current offset
+ m_time = "2022-09-14T05:59:35+05:00"
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj._IDRACNetworkAttributes__validate_time(m_time)
+ assert exc.value.args[0] == MAINTENACE_OFFSET_DIFF_MSG.format(resp[1])
+
+ # Scenario 2: When mtime is less than current time
+ m_time = "2021-09-14T05:59:35-05:00"
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj._IDRACNetworkAttributes__validate_time(m_time)
+ assert exc.value.args[0] == MAINTENACE_OFFSET_BEHIND_MSG
+
+        # Scenario 3: When mtime is greater than the current time
+ m_time = "2024-09-14T05:59:35-05:00"
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj._IDRACNetworkAttributes__validate_time(m_time)
+ assert data is None
+
+ def test_get_redfish_apply_time(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ resp = ("2022-09-14T05:59:35-05:00", "-05:00")
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.IDRACNetworkAttributes._IDRACNetworkAttributes__validate_time",
+ return_value=resp)
+ rf_settings = [
+ "OnReset",
+ "Immediate"
+ ]
+ idrac_default_args.update({'network_adapter_id': 'NIC.Mezzanine.1A',
+ 'network_device_function_id': 'NIC.Mezzanine.1A-1-1',
+ 'apply_time': 'AtMaintenanceWindowStart',
+ 'maintenance_window': {"start_time": "2022-09-14T06:59:35-05:00",
+ "duration": 600}})
+
+ # Scenario 1: When Maintenance is not supported but 'AtMaintenanceWindowStart' is passed
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj._IDRACNetworkAttributes__get_redfish_apply_time(
+ 'AtMaintenanceWindowStart', rf_settings)
+ assert exc.value.args[0] == APPLY_TIME_NOT_SUPPORTED_MSG.format(
+ 'AtMaintenanceWindowStart')
+
+ # Scenario 2: When Maintenance is not supported but 'InMaintenanceWindowOnReset' is passed
+ idrac_default_args.update({'apply_time': 'InMaintenanceWindowOnReset'})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj._IDRACNetworkAttributes__get_redfish_apply_time(
+ 'InMaintenanceWindowOnReset', rf_settings)
+ assert exc.value.args[0] == APPLY_TIME_NOT_SUPPORTED_MSG.format(
+ 'InMaintenanceWindowOnReset')
+
+        # Scenario 3: When 'InMaintenanceWindowOnReset' is supported and maintenance window details are applied
+ rf_settings.append('InMaintenanceWindowOnReset')
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj._IDRACNetworkAttributes__get_redfish_apply_time(
+ 'InMaintenanceWindowOnReset', rf_settings)
+ assert data == {'ApplyTime': 'InMaintenanceWindowOnReset',
+ 'MaintenanceWindowDurationInSeconds': 600,
+ 'MaintenanceWindowStartTime': '2022-09-14T06:59:35-05:00'}
+
+ # Scenario 4: When ApplyTime is Immediate
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj._IDRACNetworkAttributes__get_redfish_apply_time(
+ 'Immediate', rf_settings)
+ assert data == {'ApplyTime': 'Immediate'}
+
+ # Scenario 5: When ApplyTime does not support Immediate
+ rf_settings.remove('Immediate')
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj._IDRACNetworkAttributes__get_redfish_apply_time(
+ 'Immediate', rf_settings)
+ assert exc.value.args[0] == APPLY_TIME_NOT_SUPPORTED_MSG.format(
+ 'Immediate')
+
+        # Scenario 6: When the supported apply time list is empty
+ rf_settings = []
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj._IDRACNetworkAttributes__get_redfish_apply_time(
+ 'Immediate', rf_settings)
+ assert data == {}
+
+ def test_get_registry_fw_less_than_3(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ obj = MagicMock()
+ obj.json_data = {'SystemConfiguration': {
+ "Components": [
+ {'FQDD': 'NIC.Mezzanine.1A-1-1',
+ 'Attributes': [{
+ 'Name': 'VLanId',
+ 'Value': '10'
+ }]}
+ ]
+ }}
+ idrac_default_args.update(
+ {'network_device_function_id': 'NIC.Mezzanine.1A-1-1'})
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.iDRACRedfishAPI.export_scp",
+ return_value=obj)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj._IDRACNetworkAttributes__get_registry_fw_less_than_3()
+ assert data == {'VLanId': '10'}
+
+ def test_get_current_server_registry(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ reg_greater_than_6 = {'abc': False}
+ reg_less_than_6 = {'xyz': True}
+ reg_less_than_3 = {'Qwerty': False}
+ redfish_resp = {'Ethernet': {'abc': 123},
+ 'FibreChannel': {},
+ 'iSCSIBoot': {'ghi': 789}
+ }
+
+ def mock_get_dynamic_uri_request(*args, **kwargs):
+ if len(args) > 2:
+ if args[2] == 'Links':
+ return self.links
+ elif args[2] == 'Attributes':
+ return reg_greater_than_6
+ return redfish_resp
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_dynamic_uri",
+ side_effect=mock_get_dynamic_uri_request)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.IDRACNetworkAttributes._IDRACNetworkAttributes__get_registry_fw_less_than_6_more_than_3",
+ return_value=reg_less_than_6)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.IDRACNetworkAttributes._IDRACNetworkAttributes__get_registry_fw_less_than_3",
+ return_value=reg_less_than_3)
+ idrac_default_args.update({'network_adapter_id': 'NIC.Mezzanine.1A',
+ 'network_device_function_id': 'NIC.Mezzanine.1A-1-1',
+ 'apply_time': 'AtMaintenanceWindowStart',
+ 'maintenance_window': {"start_time": "2022-09-14T06:59:35-05:00",
+ "duration": 600}})
+
+        # Scenario 1: When the firmware version is greater than or equal to 6.0 and oem_network_attributes is not given
+ firm_ver = '6.1'
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_idrac_firmware_version",
+ return_value=firm_ver)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.get_current_server_registry()
+ assert data == {}
+
+        # Scenario 2: When the firmware version is greater than or equal to 6.0 and oem_network_attributes is given
+ firm_ver = '6.1'
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_idrac_firmware_version",
+ return_value=firm_ver)
+ idrac_default_args.update({'oem_network_attributes': 'some value'})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.get_current_server_registry()
+ assert data == {'abc': False}
+
+ # Scenario 3: When Firmware version is less than 6.0 and oem_network_attributes is given
+ firm_ver = '4.0'
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_idrac_firmware_version",
+ return_value=firm_ver)
+ idrac_default_args.update({'oem_network_attributes': 'some value'})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.get_current_server_registry()
+ assert data == {'xyz': True}
+
+ # Scenario 4: When Firmware version is less than 3.0 and oem_network_attributes is given
+ firm_ver = '2.9'
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_idrac_firmware_version",
+ return_value=firm_ver)
+ idrac_default_args.update({'oem_network_attributes': 'some value'})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.get_current_server_registry()
+ assert data == {'Qwerty': False}
+
+ # Scenario 5: When network_attributes is given
+ firm_ver = '7.0'
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_idrac_firmware_version",
+ return_value=firm_ver)
+ idrac_default_args.update({'network_attributes': 'some value',
+ 'oem_network_attributes': None})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.get_current_server_registry()
+ assert data == redfish_resp
+
+ def test_extract_error_msg(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ error_info = {
+ "error": {
+ "@Message.ExtendedInfo": [
+ {
+ "Message": "AttributeValue cannot be changed to read only AttributeName BusDeviceFunction.",
+ "MessageArgs": [
+ "BusDeviceFunction"
+ ]
+ },
+ {
+ "Message": "AttributeValue cannot be changed to read only AttributeName ChipMdl.",
+ "MessageArgs": [
+ "ChipMdl"
+ ]
+ },
+ {
+ "Message": "AttributeValue cannot be changed to read only AttributeName ControllerBIOSVersion.",
+ "MessageArgs": [
+ "ControllerBIOSVersion"
+ ]
+ },
+ {
+ "Message": "some random message",
+ "MessageArgs": [
+ "ControllerBIOSVersion"
+ ]
+ }]}}
+ obj = MagicMock()
+        # Scenario 1: When the response code is 202 and a response body is present
+ obj.body = obj.json_data = error_info
+ obj.status_code = 202
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.extract_error_msg(obj)
+ assert data == {'BusDeviceFunction': 'AttributeValue cannot be changed to read only AttributeName BusDeviceFunction.',
+ 'ChipMdl': 'AttributeValue cannot be changed to read only AttributeName ChipMdl.',
+ 'ControllerBIOSVersion': 'AttributeValue cannot be changed to read only AttributeName ControllerBIOSVersion.'
+ }
+
+        # Scenario 2: When the response code is 200 and there is no response body
+ obj.body = obj.json_data = ''
+ obj.status_code = 200
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.extract_error_msg(obj)
+ assert data == {}
+
+ def test_get_diff_between_current_and_module_input(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ module_attr = {'a': 123, 'b': 456}
+ server_attr = {'c': 789, 'b': 456}
+ # Scenario 1: Simple attribute which does not contain nested values
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.get_diff_between_current_and_module_input(
+ module_attr, server_attr)
+ assert data == (0, {'a': ATTRIBUTE_NOT_EXIST_CHECK_IDEMPOTENCY_MODE})
+
+        # Scenario 2: Complex attribute which contains nested values
+ module_attr = {'a': 123, 'b': 456, 'c': {'d': 789}}
+ server_attr = {'c': 789, 'b': 457, 'd': {'e': 123}}
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.get_diff_between_current_and_module_input(
+ module_attr, server_attr)
+ assert data == (2, {'a': ATTRIBUTE_NOT_EXIST_CHECK_IDEMPOTENCY_MODE})
+
+        # Scenario 3: Complex attribute which contains nested values and the nested value matches
+ module_attr = {'a': 123, 'b': 456, 'c': {'d': 789}}
+ server_attr = {'c': {'d': 789}, 'b': 457, 'd': {'e': 123}}
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.get_diff_between_current_and_module_input(
+ module_attr, server_attr)
+ assert data == (1, {'a': ATTRIBUTE_NOT_EXIST_CHECK_IDEMPOTENCY_MODE})
+
+        # Scenario 4: When module_attr is None
+ module_attr = None
+ server_attr = {'a': 123}
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.get_diff_between_current_and_module_input(
+ module_attr, server_attr)
+ assert data == (0, {})
+
+ def test_perform_validation_for_network_adapter_id(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ netwkr_adapters = {
+ "@odata.id": "/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters"
+ }
+ network_adapter_list = [
+ {
+ "@odata.id": "/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/NIC.Mezzanine.1A"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/NIC.Mezzanine.1B"
+ }
+ ]
+
+ def mock_get_dynamic_uri_request(*args, **kwargs):
+ if args[2] == 'NetworkInterfaces':
+ return netwkr_adapters
+ return network_adapter_list
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.validate_and_get_first_resource_id_uri",
+ return_value=('System.Embedded.1', ''))
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_dynamic_uri",
+ side_effect=mock_get_dynamic_uri_request)
+
+        # Scenario 1: When network_adapter_id is in the server's network adapter list
+ idrac_default_args.update({'network_adapter_id': 'NIC.Mezzanine.1B'})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj._IDRACNetworkAttributes__perform_validation_for_network_adapter_id()
+ assert data == "/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/NIC.Mezzanine.1B"
+
+        # Scenario 2: When network_adapter_id is not in the server's network adapter list
+ network_adapter_id = 'random value'
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.validate_and_get_first_resource_id_uri",
+ return_value=('System.Embedded.1', ''))
+ idrac_default_args.update({'network_adapter_id': network_adapter_id})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj._IDRACNetworkAttributes__perform_validation_for_network_adapter_id()
+ assert exc.value.args[0] == INVALID_ID_MSG.format(network_adapter_id,
+ 'network_adapter_id')
+
+        # Scenario 3: When validate_and_get_first_resource_id_uri returns an error message
+ network_adapter_id = 'random value'
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.validate_and_get_first_resource_id_uri",
+ return_value=('System.Embedded.1', 'error_msg'))
+ idrac_default_args.update({'network_adapter_id': network_adapter_id})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj._IDRACNetworkAttributes__perform_validation_for_network_adapter_id()
+ assert exc.value.args[0] == 'error_msg'
+
+ def test_perform_validation_for_network_device_function_id(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ netwkr_devices = {
+ "@odata.id": "/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/NIC.Mezzanine.1A/NetworkDeviceFunctions"
+ }
+ network_device_function_list = [
+ {
+ "@odata.id": "/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/NIC.Mezzanine.1A/NetworkDeviceFunctions/NIC.Mezzanine.1A-1-1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/NIC.Mezzanine.1A/NetworkDeviceFunctions/NIC.Mezzanine.1A-2-1"
+ }
+ ]
+
+ def mock_get_dynamic_uri_request(*args, **kwargs):
+ if args[2] == 'NetworkDeviceFunctions':
+ return netwkr_devices
+ return network_device_function_list
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.validate_and_get_first_resource_id_uri",
+ return_value=('System.Embedded.1', ''))
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.IDRACNetworkAttributes._IDRACNetworkAttributes__perform_validation_for_network_adapter_id",
+ return_value=self.uri)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_dynamic_uri",
+ side_effect=mock_get_dynamic_uri_request)
+
+        # Scenario 1: When network_device_function_id is in the server's network device function list
+ device_uri = "/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/NIC.Mezzanine.1A/NetworkDeviceFunctions/NIC.Mezzanine.1A-2-1"
+ idrac_default_args.update(
+ {'network_device_function_id': 'NIC.Mezzanine.1A-2-1'})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj._IDRACNetworkAttributes__perform_validation_for_network_device_function_id()
+ assert data == device_uri
+
+        # Scenario 2: When network_device_function_id is not in the server's network device function list
+ network_device_function_id = 'random value'
+ idrac_default_args.update(
+ {'network_device_function_id': network_device_function_id})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj._IDRACNetworkAttributes__perform_validation_for_network_device_function_id()
+ assert exc.value.args[0] == INVALID_ID_MSG.format(
+ network_device_function_id, 'network_device_function_id')
+
+ def test_validate_job_timeout(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+
+        # Scenario 1: When job_wait is True and job_wait_timeout is negative
+ idrac_default_args.update({'job_wait': True, 'job_wait_timeout': -120})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.validate_job_timeout()
+ assert exc.value.args[0] == TIMEOUT_NEGATIVE_OR_ZERO_MSG
+
+ # Scenario 2: when job_wait is False
+ idrac_default_args.update(
+ {'job_wait': False, 'job_wait_timeout': -120})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.validate_job_timeout()
+ assert data is None
+
+ def test_apply_time(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_dynamic_uri",
+ return_value=self.redfish_settings)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.IDRACNetworkAttributes._IDRACNetworkAttributes__get_redfish_apply_time",
+                     return_value={'ApplyTime': "OnReset"})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ rf_set = idr_obj.apply_time(self.uri)
+        assert rf_set == {'ApplyTime': "OnReset"}
+
+ def test_set_dynamic_base_uri_and_validate_ids(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ tmp_dict = {}
+ tmp_dict.update({'Links': self.links,
+ '@Redfish.Settings': self.redfish_settings.get('@Redfish.Settings')})
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_dynamic_uri",
+ return_value=tmp_dict)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.IDRACNetworkAttributes._IDRACNetworkAttributes__perform_validation_for_network_device_function_id",
+ return_value=self.uri)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.IDRACNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.set_dynamic_base_uri_and_validate_ids()
+ assert data is None
+
+ def test_clear_pending(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ action_setting_uri_resp = {
+ "Actions": {
+ "#DellManager.ClearPending": {
+ "target": "/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/NIC.Mezzanine.1A/NetworkDeviceFunctions/NIC.Mezzanine.1A-1-1/Oem/Dell/" +
+ "DellNetworkAttributes/NIC.Mezzanine.1A-1-1/Settings/Actions/DellManager.ClearPending"
+ }
+ },
+ "Attributes": {}
+ }
+
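+        # Side effect: return the @Redfish.Settings block when that key is requested, otherwise the ClearPending action payload above.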
+ def mock_get_dynamic_uri_request(*args, **kwargs):
+ if len(args) > 2 and args[2] == '@Redfish.Settings':
+ return self.redfish_settings.get('@Redfish.Settings')
+ return action_setting_uri_resp
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_dynamic_uri",
+ side_effect=mock_get_dynamic_uri_request)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_idrac_firmware_version",
+ return_value='6.1')
+
+        # Scenario 1: When there are no pending attributes
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.OEMNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.clear_pending()
+ assert exc.value.args[0] == NO_CHANGES_FOUND_MSG
+
+        # Scenario 2: When there are pending attributes and the scheduled job is running, in normal mode
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_scheduled_job_resp",
+ return_value={'Id': 'JIDXXXXXX', 'JobState': 'Running'})
+ action_setting_uri_resp.update({'Attributes': {'VLanId': 10}})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.OEMNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.clear_pending()
+ assert exc.value.args[0] == JOB_RUNNING_CLEAR_PENDING_ATTR.format(
+ 'NICConfiguration')
+
+        # Scenario 3: When there are pending attributes and the scheduled job is in the Starting state, in normal mode
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_scheduled_job_resp",
+ return_value={'Id': 'JIDXXXXXX', 'JobState': 'Starting'})
+ action_setting_uri_resp.update({'Attributes': {'VLanId': 10}})
+ g_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr__obj = self.module.OEMNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, g_module)
+ with pytest.raises(Exception) as exc:
+ idr__obj.clear_pending()
+ assert exc.value.args[0] == SUCCESS_CLEAR_PENDING_ATTR_MSG
+
+ # Scenario 4: Scenario 3 in check mode
+ g_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
+ idr__obj = self.module.OEMNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, g_module)
+ with pytest.raises(Exception) as exc:
+ idr__obj.clear_pending()
+ assert exc.value.args[0] == CHANGES_FOUND_MSG
+
+        # Scenario 5: When there are pending attributes but no job ID is present, in normal mode
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_scheduled_job_resp",
+ return_value={'Id': '', 'JobState': 'Starting'})
+ g_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.OEMNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, g_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.clear_pending()
+ assert exc.value.args[0] == SUCCESS_CLEAR_PENDING_ATTR_MSG
+
+ # Scenario 6: Scenario 5 in check_mode
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
+ idr_obj = self.module.OEMNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.clear_pending()
+ assert exc.value.args[0] == CHANGES_FOUND_MSG
+
+        # Scenario 7: When the job is completed, in check mode; ideally this condition is not hit
+        # because the function returns only scheduled jobs
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_scheduled_job_resp",
+ return_value={'Id': 'JIDXXXXXX', 'JobState': 'Completed'})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
+ idr_obj = self.module.OEMNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.clear_pending()
+ assert exc.value.args[0] == CHANGES_FOUND_MSG
+
+        # Scenario 8: When the firmware version is less than 3 and oem_network_attributes is not given
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_idrac_firmware_version",
+ return_value='2.9')
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
+ idr_obj = self.module.OEMNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.clear_pending()
+ assert exc.value.args[0] == CLEAR_PENDING_NOT_SUPPORTED_WITHOUT_ATTR_IDRAC8
+
+        # Scenario 9: When the firmware version is less than 3 and oem_network_attributes is given
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_idrac_firmware_version",
+ return_value='2.9')
+ idrac_default_args.update(
+ {'oem_network_attributes': {'somedata': 'somevalue'}})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
+ idr_obj = self.module.OEMNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.clear_pending()
+ assert data is None
+
+        # Scenario 10: When the firmware version is greater than 3, a job exists in the Starting state, in normal mode, without oem_network_attributes
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_idrac_firmware_version",
+ return_value='3.1')
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_scheduled_job_resp",
+ return_value={'Id': 'JIDXXXXXX', 'JobState': 'Starting'})
+ idrac_default_args.update({'oem_network_attributes': None})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.OEMNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.clear_pending()
+ assert exc.value.args[0] == SUCCESS_CLEAR_PENDING_ATTR_MSG
+
+ def test_perform_operation_OEMNetworkAttributes(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ obj = MagicMock()
+ obj.headers = {'Location': self.uri}
+ obj.json_data = {'data': 'some value'}
+ apply_time = {'ApplyTime': 'Immediate'}
+ error_info = {'abc': ATTRIBUTE_NOT_EXIST_CHECK_IDEMPOTENCY_MODE}
+
+ def mock_get_dynamic_uri_request(*args, **kwargs):
+ if len(args) > 2 and args[2] == 'Links':
+ return self.links
+ return self.redfish_settings
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_dynamic_uri",
+ side_effect=mock_get_dynamic_uri_request)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.iDRACRedfishAPI.invoke_request",
+ return_value=obj)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.iDRACRedfishAPI.import_scp",
+ return_value=obj)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.IDRACNetworkAttributes.apply_time",
+ return_value=apply_time)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.IDRACNetworkAttributes.extract_error_msg",
+ return_value=error_info)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_idrac_firmware_version",
+ return_value='6.1')
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.idrac_redfish_job_tracking",
+ return_value=(False, 'msg', obj.json_data, 600))
+
+ idrac_default_args.update({'oem_network_attributes': {'VlanId': 1},
+ 'job_wait': True,
+ 'job_wait_timeout': 1200})
+        # Scenario 1: When the job has returned successfully and no error message is present
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.OEMNetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ data = idr_obj.perform_operation()
+ assert data == (obj, {
+ 'abc': ATTRIBUTE_NOT_EXIST_CHECK_IDEMPOTENCY_MODE}, False)
+
+ def test_perform_operation_NetworkAttributes(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ obj = MagicMock()
+ obj.headers = {'Location': self.uri}
+ obj.json_data = {'data': 'some value'}
+ apply_time = {'ApplyTime': 'Immediate'}
+ error_info = {'abc': ATTRIBUTE_NOT_EXIST_CHECK_IDEMPOTENCY_MODE}
+
+ def mock_get_dynamic_uri_request(*args, **kwargs):
+ if len(args) > 2 and args[2] == 'Links':
+ return self.links
+ return self.redfish_settings
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_dynamic_uri",
+ side_effect=mock_get_dynamic_uri_request)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.iDRACRedfishAPI.invoke_request",
+ return_value=obj)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.iDRACRedfishAPI.import_scp",
+ return_value=obj)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.IDRACNetworkAttributes.apply_time",
+ return_value=apply_time)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.IDRACNetworkAttributes.extract_error_msg",
+ return_value=error_info)
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.idrac_redfish_job_tracking",
+ return_value=(False, 'msg', obj.json_data, 500))
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_idrac_firmware_version",
+ return_value='6.1')
+
+ idrac_default_args.update({'network_attributes': {'VlanId': 1},
+ 'job_wait': True,
+ 'job_wait_timeout': 1200})
+        # Scenario 1: When the job has returned successfully and no error message is present
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.NetworkAttributes(
+ idrac_connection_ntwrk_attr_mock, f_module)
+ idr_obj.redfish_uri = self.uri
+ data = idr_obj.perform_operation()
+ assert data == (obj, {
+ 'abc': ATTRIBUTE_NOT_EXIST_CHECK_IDEMPOTENCY_MODE}, False)
+
+ def test_perform_operation_for_main(self, idrac_default_args, idrac_connection_ntwrk_attr_mock,
+ idrac_ntwrk_attr_mock, mocker):
+ obj = MagicMock()
+ obj.json_data = {'some': 'value'}
+ job_state = {'JobState': "Completed"}
+ invalid_attr = {'a': ATTRIBUTE_NOT_EXIST_CHECK_IDEMPOTENCY_MODE}
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.idrac_redfish_job_tracking",
+ return_value=(False, 'some msg', job_state, 700))
+ # Scenario 1: When diff is false
+ diff = 0
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
+ with pytest.raises(Exception) as exc:
+ self.module.perform_operation_for_main(idrac_connection_ntwrk_attr_mock,
+ f_module, obj, diff, invalid_attr)
+ assert exc.value.args[0] == NO_CHANGES_FOUND_MSG
+
+ # Scenario 2: When diff is True and check mode is True
+ diff = ({'a': 123}, {'c': 789})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
+ with pytest.raises(Exception) as exc:
+ self.module.perform_operation_for_main(idrac_connection_ntwrk_attr_mock,
+ f_module, obj, diff, invalid_attr)
+ assert exc.value.args[0] == CHANGES_FOUND_MSG
+
+        # Scenario 3: When diff is True and JobState is completed and
+        # there is an invalid_attr, in normal mode
+ resp = MagicMock()
+ resp.headers = {'Location': self.uri}
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.get_idrac_firmware_version",
+ return_value='6.1')
+
+ def return_data():
+ return (resp, invalid_attr, False)
+ obj.perform_operation = return_data
+ obj.json_data = {'JobState': 'Completed'}
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.iDRACRedfishAPI.invoke_request",
+ return_value=obj)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ with pytest.raises(Exception) as exc:
+ self.module.perform_operation_for_main(idrac_connection_ntwrk_attr_mock,
+ f_module, obj, diff, invalid_attr)
+ assert exc.value.args[0] == VALID_AND_INVALID_ATTR_MSG
+
+ # Scenario 4: When diff is True and JobState is completed and
+ # There is no invalid_attr in normal mode
+ invalid_attr = {}
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ with pytest.raises(Exception) as exc:
+ self.module.perform_operation_for_main(idrac_connection_ntwrk_attr_mock,
+ f_module, obj, diff, invalid_attr)
+ assert exc.value.args[0] == SUCCESS_MSG
+
+ # Scenario 5: When diff is True and JobState is not completed and
+ # There is no invalid_attr in normal mode
+ invalid_attr = {}
+
+ def return_data():
+ return (resp, invalid_attr, False)
+ obj.json_data = {'JobState': "Scheduled"}
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.iDRACRedfishAPI.invoke_request",
+ return_value=obj)
+ obj.perform_operation = return_data
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ with pytest.raises(Exception) as exc:
+ self.module.perform_operation_for_main(idrac_connection_ntwrk_attr_mock,
+ f_module, obj, diff, invalid_attr)
+ assert exc.value.args[0] == SCHEDULE_MSG
+
+        # Scenario 6: When diff is False and check mode is enabled
+ diff = 0
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
+ with pytest.raises(Exception) as exc:
+ self.module.perform_operation_for_main(idrac_connection_ntwrk_attr_mock,
+ f_module, obj, diff, invalid_attr)
+ assert exc.value.args[0] == NO_CHANGES_FOUND_MSG
+
+        # Scenario 7: When diff is False, check mode is False, and invalid_attr is empty
+ diff = 0
+ invalid_attr = {}
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ with pytest.raises(Exception) as exc:
+ self.module.perform_operation_for_main(idrac_connection_ntwrk_attr_mock,
+ f_module, obj, diff, invalid_attr)
+ assert exc.value.args[0] == NO_CHANGES_FOUND_MSG
+
+        # Scenario 8: When job_wait is True and the job does not finish within job_wait_timeout
+ diff = 1
+ invalid_attr = {}
+ resp = MagicMock()
+ resp.headers = {'Location': self.uri}
+
+ def return_data():
+ return (resp, invalid_attr, True)
+ obj.perform_operation = return_data
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.idrac_redfish_job_tracking",
+ return_value=(False, 'msg', obj.json_data, 1200))
+ idrac_default_args.update({'job_wait_timeout': 1000})
+ h_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ with pytest.raises(Exception) as exc:
+ self.module.perform_operation_for_main(idrac_connection_ntwrk_attr_mock,
+ h_module, obj, diff, invalid_attr)
+ assert exc.value.args[0] == WAIT_TIMEOUT_MSG.format(1000)
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_idrac_network_attributes_main_exception_handling_case(self, exc_type, mocker, idrac_default_args,
+ idrac_connection_ntwrk_attr_mock, idrac_ntwrk_attr_mock):
+ obj = MagicMock()
+ obj.perform_validation_for_network_adapter_id.return_value = None
+ obj.perform_validation_for_network_device_function_id.return_value = None
+ obj.get_diff_between_current_and_module_input.return_value = (
+ None, None)
+ obj.validate_job_timeout.return_value = None
+ obj.clear_pending.return_value = None
+ idrac_default_args.update({'apply_time': "Immediate",
+ 'network_adapter_id': 'Some_adapter_id',
+ 'network_device_function_id': 'some_device_id',
+                                   'clear_pending': exc_type == URLError})
+ json_str = to_text(json.dumps({"data": "out"}))
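+        # HTTPError and SSLValidationError are raised with full HTTP error details; the remaining exception types are raised with a plain message.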
+ if exc_type in [HTTPError, SSLValidationError]:
+ tmp = {'network_attributes': {'VlanId': 10}}
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.IDRACNetworkAttributes.set_dynamic_base_uri_and_validate_ids",
+ side_effect=exc_type('https://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ else:
+
+ tmp = {'oem_network_attributes': {'VlanId': 10}}
+ mocker.patch(MODULE_PATH + "idrac_network_attributes.IDRACNetworkAttributes.set_dynamic_base_uri_and_validate_ids",
+ side_effect=exc_type('test'))
+ idrac_default_args.update(tmp)
+ result = self._run_module(idrac_default_args)
+ if exc_type == URLError:
+ assert result['unreachable'] is True
+ else:
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_os_deployment.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_os_deployment.py
index d89673566..741aa83a3 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_os_deployment.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_os_deployment.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -14,10 +14,9 @@ __metaclass__ = type
import pytest
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_os_deployment
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from mock import MagicMock
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.utils import set_module_args, exit_json, \
- fail_json, AnsibleFailJson, AnsibleExitJson
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.utils import set_module_args
from pytest import importorskip
importorskip("omsdk.sdkfile")
@@ -49,7 +48,7 @@ class TestOsDeployment(FakeAnsibleModule):
@pytest.fixture
def omsdk_mock(self, mocker):
mocker.patch(MODULE_UTIL_PATH + 'dellemc_idrac.UserCredentials')
- mocker.patch(MODULE_UTIL_PATH + 'dellemc_idrac.WsManOptions')
+ mocker.patch(MODULE_UTIL_PATH + 'dellemc_idrac.ProtoPreference')
@pytest.fixture
def fileonshare_mock(self, mocker):
@@ -100,7 +99,7 @@ class TestOsDeployment(FakeAnsibleModule):
idrac_mock.config_mgr.boot_to_network_iso.return_value = {"Status": "Success"}
params = {"idrac_ip": "idrac_ip", "idrac_user": "idrac_user", "idrac_password": "idrac_password",
"ca_path": "/path/to/ca_cert.pem",
- "share_name": None, "share_password": "dummy_share_password",
+ "share_name": "", "share_password": "dummy_share_password",
"iso_image": "dummy_iso_image", "expose_duration": "100"
}
set_module_args(params)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py
index 99185a933..342bd51fe 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py
@@ -2,8 +2,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 6.3.0
-# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 8.1.0
+# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -15,14 +15,15 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_redfish_storage_controller
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from io import StringIO
from ansible.module_utils._text import to_text
-from ansible.module_utils.urls import urllib_error
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+HTTPS_ADDRESS = 'https://testhost.com'
+HTTP_ERROR_MSG = 'http error message'
@pytest.fixture
@@ -38,7 +39,7 @@ class TestIdracRedfishStorageController(FakeAnsibleModule):
module = idrac_redfish_storage_controller
def test_check_id_exists(self, redfish_str_controller_conn, redfish_response_mock):
- param = {"baseuri": "192.168.0.1", "username": "username", "password": "password"}
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password"}
uri = "/redfish/v1/Dell/Systems/{system_id}/Storage/DellController/{controller_id}"
f_module = self.get_module_mock(params=param)
redfish_response_mock.success = True
@@ -46,6 +47,7 @@ class TestIdracRedfishStorageController(FakeAnsibleModule):
result = self.module.check_id_exists(f_module, redfish_str_controller_conn, "controller_id",
"RAID.Integrated.1-1", uri)
assert result is None
+
redfish_response_mock.success = False
redfish_response_mock.status_code = 400
with pytest.raises(Exception) as ex:
@@ -53,13 +55,25 @@ class TestIdracRedfishStorageController(FakeAnsibleModule):
"RAID.Integrated.1-1", uri)
assert ex.value.args[0] == "controller_id with id 'RAID.Integrated.1-1' not found in system"
+ json_str = to_text(json.dumps({"data": "out"}))
+ redfish_str_controller_conn.invoke_request.side_effect = HTTPError(
+ HTTPS_ADDRESS, 400,
+ HTTP_ERROR_MSG,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ with pytest.raises(Exception) as ex:
+ self.module.check_id_exists(f_module, redfish_str_controller_conn, "controller_id",
+ "RAID.Integrated.1-1", uri)
+ assert ex.value.args[0] == "controller_id with id 'RAID.Integrated.1-1' not found in system"
+
def test_validate_inputs(self, redfish_str_controller_conn, redfish_response_mock):
- param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
"command": "ReKey", "mode": "LKM"}
f_module = self.get_module_mock(params=param)
with pytest.raises(Exception) as ex:
self.module.validate_inputs(f_module)
assert ex.value.args[0] == "All of the following: key, key_id and old_key are required for 'ReKey' operation."
+
param.update({"command": "AssignSpare", "target": ["Disk.Bay.0:Enclosure.Internal.0-2:RAID.Integrated.1-1",
"Disk.Bay.1:Enclosure.Internal.0-2:RAID.Integrated.1-1"]})
f_module = self.get_module_mock(params=param)
@@ -67,18 +81,21 @@ class TestIdracRedfishStorageController(FakeAnsibleModule):
self.module.validate_inputs(f_module)
assert ex.value.args[0] == "The Fully Qualified Device Descriptor (FQDD) of the target " \
"physical disk must be only one."
+
param.update({"volume_id": ["Disk.Virtual.0:RAID.Mezzanine.1C-0",
"Disk.Virtual.0:RAID.Mezzanine.1C-1"], "target": None})
with pytest.raises(Exception) as ex:
self.module.validate_inputs(f_module)
assert ex.value.args[0] == "The Fully Qualified Device Descriptor (FQDD) of the target " \
"virtual drive must be only one."
+
param.update({"command": "EnableControllerEncryption"})
f_module = self.get_module_mock(params=param)
with pytest.raises(Exception) as ex:
self.module.validate_inputs(f_module)
assert ex.value.args[0] == "All of the following: key, key_id are " \
"required for 'EnableControllerEncryption' operation."
+
param.update({"command": "ChangePDStateToOnline",
"target": ["Disk.Bay.0:Enclosure.Internal.0-2:RAID.Integrated.1-1",
"Disk.Bay.0:Enclosure.Internal.0-2:RAID.Integrated.1-1"]})
@@ -87,8 +104,37 @@ class TestIdracRedfishStorageController(FakeAnsibleModule):
assert ex.value.args[0] == "The Fully Qualified Device Descriptor (FQDD) of the target " \
"physical disk must be only one."
+ param.update({"key": "Key@123", "key_id": 123, "old_key": "abc",
+ "command": "ReKey", "mode": "LKM"})
+ f_module = self.get_module_mock(params=param)
+ result = self.module.validate_inputs(f_module)
+ assert result is None
+
+ param.update({"key": "Key@123", "key_id": 123,
+ "command": "EnableControllerEncryption", "mode": "LKM"})
+ f_module = self.get_module_mock(params=param)
+ result = self.module.validate_inputs(f_module)
+ assert result is None
+
+ param.update({"volume_id": None, "command": "AssignSpare",
+ "target": ["Disk.Bay.0:Enclosure.Internal.0-2:RAID.Integrated.1-1"]})
+ f_module = self.get_module_mock(params=param)
+ result = self.module.validate_inputs(f_module)
+ assert result is None
+
+ param.update({"command": "ChangePDStateToOnline",
+ "target": None})
+ f_module = self.get_module_mock(params=param)
+ result = self.module.validate_inputs(f_module)
+ assert result is None
+
+ param.update({"command": "NoCommand"})
+ f_module = self.get_module_mock(params=param)
+ result = self.module.validate_inputs(f_module)
+ assert result is None
+
def test_target_identify_pattern(self, redfish_str_controller_conn, redfish_response_mock):
- param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
"command": "BlinkTarget", "target": "Disk.Bay.1:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1",
"volume_id": "Disk.Virtual.0:RAID.Mezzanine.1C-1"}
f_module = self.get_module_mock(params=param)
@@ -96,13 +142,29 @@ class TestIdracRedfishStorageController(FakeAnsibleModule):
redfish_response_mock.status_code = 200
result = self.module.target_identify_pattern(f_module, redfish_str_controller_conn)
assert result.status_code == 200
+
f_module.check_mode = True
with pytest.raises(Exception) as ex:
self.module.target_identify_pattern(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "Changes found to be applied."
+ param.update({"volume_id": None})
+ f_module = self.get_module_mock(params=param)
+ result = self.module.target_identify_pattern(f_module, redfish_str_controller_conn)
+ assert result.status_code == 200
+
+ param.update({"target": None})
+ f_module = self.get_module_mock(params=param)
+ result = self.module.target_identify_pattern(f_module, redfish_str_controller_conn)
+ assert result.status_code == 200
+
+ param.update({"volume_id": "Disk.Virtual.0:RAID.Mezzanine.1C-1"})
+ f_module = self.get_module_mock(params=param)
+ result = self.module.target_identify_pattern(f_module, redfish_str_controller_conn)
+ assert result.status_code == 200
+
def test_ctrl_reset_config(self, redfish_str_controller_conn, redfish_response_mock, mocker):
- param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
"controller_id": "RAID.Mezzanine.1C-1", "command": "ResetConfig"}
f_module = self.get_module_mock(params=param)
mocker.patch(MODULE_PATH + "idrac_redfish_storage_controller.check_id_exists", return_value=None)
@@ -120,24 +182,41 @@ class TestIdracRedfishStorageController(FakeAnsibleModule):
assert ex.value.args[0] == "No changes found to be applied."
def test_hot_spare_config(self, redfish_str_controller_conn, redfish_response_mock):
- param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
- "command": "AssignSpare", "target": "Disk.Bay.1:Enclosure.Internal.0-2:RAID.Integrated.1-1"}
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
+ "command": "AssignSpare", "target": ["Disk.Bay.1:Enclosure.Internal.0-2:RAID.Integrated.1-1"]}
f_module = self.get_module_mock(params=param)
redfish_response_mock.json_data = {"HotspareType": "None"}
redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
result = self.module.hot_spare_config(f_module, redfish_str_controller_conn)
assert result[2] == "JID_XXXXXXXXXXXXX"
+
+ param.update({"volume_id": 'Disk.Virtual.0:RAID.Slot.1-1'})
+ f_module = self.get_module_mock(params=param)
+ result = self.module.hot_spare_config(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+
f_module.check_mode = True
with pytest.raises(Exception) as ex:
self.module.hot_spare_config(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "Changes found to be applied."
+
redfish_response_mock.json_data = {"HotspareType": "Global"}
with pytest.raises(Exception) as ex:
self.module.hot_spare_config(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "No changes found to be applied."
+ json_str = to_text(json.dumps({"data": "out"}))
+ redfish_str_controller_conn.invoke_request.side_effect = HTTPError(
+ HTTPS_ADDRESS, 400,
+ HTTP_ERROR_MSG,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ with pytest.raises(Exception) as ex:
+ self.module.hot_spare_config(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Unable to locate the physical disk with the ID: Disk.Bay.1:Enclosure.Internal.0-2:RAID.Integrated.1-1"
+
def test_ctrl_key(self, redfish_str_controller_conn, redfish_response_mock, mocker):
- param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
"command": "SetControllerKey", "controller_id": "RAID.Integrated.1-1", "mode": "LKM"}
mocker.patch(MODULE_PATH + "idrac_redfish_storage_controller.check_id_exists", return_value=None)
f_module = self.get_module_mock(params=param)
@@ -145,49 +224,81 @@ class TestIdracRedfishStorageController(FakeAnsibleModule):
with pytest.raises(Exception) as ex:
self.module.ctrl_key(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "The storage controller 'RAID.Integrated.1-1' does not support encryption."
+
f_module.check_mode = True
redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": None}
with pytest.raises(Exception) as ex:
self.module.ctrl_key(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "Changes found to be applied."
+
redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": "Key@123"}
with pytest.raises(Exception) as ex:
self.module.ctrl_key(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "No changes found to be applied."
+
+ param.update({"command": "ReKey"})
f_module = self.get_module_mock(params=param)
f_module.check_mode = True
- param.update({"command": "ReKey"})
with pytest.raises(Exception) as ex:
self.module.ctrl_key(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "Changes found to be applied."
+
+ f_module.check_mode = False
+ redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": None}
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+ result = self.module.ctrl_key(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+
+ param.update({"mode": "LKM_"})
+ f_module.check_mode = False
+ redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": None}
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+ result = self.module.ctrl_key(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+
param.update({"command": "RemoveControllerKey"})
+ redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": 'Key@123'}
f_module = self.get_module_mock(params=param)
f_module.check_mode = True
with pytest.raises(Exception) as ex:
self.module.ctrl_key(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "Changes found to be applied."
+
redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": None}
with pytest.raises(Exception) as ex:
self.module.ctrl_key(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "No changes found to be applied."
- param.update({"command": "EnableControllerEncryption"})
+
+ param.update({"command": "EnableControllerEncryption", "mode": "LKM"})
f_module = self.get_module_mock(params=param)
f_module.check_mode = True
with pytest.raises(Exception) as ex:
self.module.ctrl_key(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "Changes found to be applied."
+
redfish_response_mock.json_data = {"SecurityStatus": "SecurityKeyAssigned", "KeyID": None}
with pytest.raises(Exception) as ex:
self.module.ctrl_key(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "No changes found to be applied."
+
f_module.check_mode = False
redfish_response_mock.json_data = {"SecurityStatus": "EncryptionCapable", "KeyID": None}
redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
result = self.module.ctrl_key(f_module, redfish_str_controller_conn)
assert result[2] == "JID_XXXXXXXXXXXXX"
+ param.update({"mode": "LKM_"})
+ result = self.module.ctrl_key(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+
+ param.update({"command": "wrongCommand", "mode": "LKM"})
+ f_module = self.get_module_mock(params=param)
+ f_module.check_mode = True
+ result = self.module.ctrl_key(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+
def test_convert_raid_status(self, redfish_str_controller_conn, redfish_response_mock):
- param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
"command": "ConvertToRAID", "target": ["Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1",
"Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1"]}
f_module = self.get_module_mock(params=param)
@@ -195,18 +306,30 @@ class TestIdracRedfishStorageController(FakeAnsibleModule):
redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
result = self.module.convert_raid_status(f_module, redfish_str_controller_conn)
assert result[2] == "JID_XXXXXXXXXXXXX"
+
f_module.check_mode = True
with pytest.raises(Exception) as ex:
self.module.convert_raid_status(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "Changes found to be applied."
+
f_module.check_mode = False
redfish_response_mock.json_data = {"Oem": {"Dell": {"DellPhysicalDisk": {"RaidStatus": "Ready"}}}}
with pytest.raises(Exception) as ex:
self.module.convert_raid_status(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "No changes found to be applied."
+ json_str = to_text(json.dumps({"data": "out"}))
+ redfish_str_controller_conn.invoke_request.side_effect = HTTPError(
+ HTTPS_ADDRESS, 400,
+ HTTP_ERROR_MSG,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ with pytest.raises(Exception) as ex:
+ self.module.convert_raid_status(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Unable to locate the physical disk with the ID: Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1"
+
def test_change_pd_status(self, redfish_str_controller_conn, redfish_response_mock):
- param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
"command": "ChangePDStateToOnline",
"target": ["Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1",
"Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1"]}
@@ -215,41 +338,602 @@ class TestIdracRedfishStorageController(FakeAnsibleModule):
redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
result = self.module.change_pd_status(f_module, redfish_str_controller_conn)
assert result[2] == "JID_XXXXXXXXXXXXX"
+
f_module.check_mode = True
with pytest.raises(Exception) as ex:
self.module.change_pd_status(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "Changes found to be applied."
+
f_module.check_mode = False
redfish_response_mock.json_data = {"Oem": {"Dell": {"DellPhysicalDisk": {"RaidStatus": "Online"}}}}
with pytest.raises(Exception) as ex:
self.module.change_pd_status(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "No changes found to be applied."
+ json_str = to_text(json.dumps({"data": "out"}))
+ redfish_str_controller_conn.invoke_request.side_effect = HTTPError(
+ HTTPS_ADDRESS, 400,
+ HTTP_ERROR_MSG,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ with pytest.raises(Exception) as ex:
+ self.module.change_pd_status(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Unable to locate the physical disk with the ID: Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1"
+
def test_lock_virtual_disk(self, redfish_str_controller_conn, redfish_response_mock, mocker):
- param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
"command": "LockVirtualDisk",
- "volume_id": "Disk.Virtual.0:RAID.SL.3-1"}
+ "volume_id": ["Disk.Virtual.0:RAID.SL.3-1"]
+ }
f_module = self.get_module_mock(params=param)
mocker.patch(MODULE_PATH + "idrac_redfish_storage_controller.check_id_exists", return_value=None)
redfish_response_mock.json_data = {"Oem": {"Dell": {"DellVolume": {"LockStatus": "Unlocked"}}}}
redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
result = self.module.lock_virtual_disk(f_module, redfish_str_controller_conn)
assert result[2] == "JID_XXXXXXXXXXXXX"
+
f_module.check_mode = True
with pytest.raises(Exception) as ex:
self.module.lock_virtual_disk(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "Changes found to be applied."
+
f_module.check_mode = False
redfish_response_mock.json_data = {"Oem": {"Dell": {"DellVolume": {"LockStatus": "Locked"}}}}
with pytest.raises(Exception) as ex:
self.module.lock_virtual_disk(f_module, redfish_str_controller_conn)
assert ex.value.args[0] == "No changes found to be applied."
+ redfish_response_mock.json_data = {"Oem": {"Dell": {"DellVolume": {"LockStatus": "Unlocked"}}},
+ "Links": {
+ "Drives": [
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1/"
+ },
+ {
+ "@odata.id": "/redfish/v1/Systems/System.Embedded.1/"
+ }],
+ "Drives@odata.count": 2}}
+ with pytest.raises(Exception) as ex:
+ self.module.lock_virtual_disk(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Volume is not encryption capable."
+
+ json_str = to_text(json.dumps({"data": "out"}))
+ redfish_str_controller_conn.invoke_request.side_effect = HTTPError(
+ HTTPS_ADDRESS, 400,
+ HTTP_ERROR_MSG,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ with pytest.raises(Exception) as ex:
+ self.module.lock_virtual_disk(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Unable to locate the physical disk with the ID: RAID.SL.3-1"
+
+ def test_online_capacity_expansion_raid_type_error(self, redfish_str_controller_conn, redfish_response_mock, mocker):
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
+ "command": "OnlineCapacityExpansion",
+ "volume_id": ["Disk.Virtual.0:RAID.SL.3-1"],
+ "target": ["Disk.Bay.2:Enclosure.Internal.0-0:RAID.Integrated.1-1"]}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "idrac_redfish_storage_controller.check_id_exists", return_value=None)
+ redfish_response_mock.json_data = {"RAIDType": "RAID50"}
+ with pytest.raises(Exception) as ex:
+ self.module.online_capacity_expansion(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Online Capacity Expansion is not supported for RAID50 virtual disks."
+
+ redfish_response_mock.json_data = {"RAIDType": "RAID1"}
+ with pytest.raises(Exception) as ex:
+ self.module.online_capacity_expansion(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Cannot add more than two disks to RAID1 virtual disk."
+
+ json_str = to_text(json.dumps({"data": "out"}))
+ redfish_str_controller_conn.invoke_request.side_effect = HTTPError(
+ HTTPS_ADDRESS, 400,
+ HTTP_ERROR_MSG,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ with pytest.raises(Exception) as ex:
+ self.module.online_capacity_expansion(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Unable to locate the virtual disk with the ID: Disk.Virtual.0:RAID.SL.3-1"
+
+ def test_online_capacity_expansion_empty_target(self, redfish_str_controller_conn, redfish_response_mock, mocker):
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
+ "command": "OnlineCapacityExpansion",
+ "volume_id": ["Disk.Virtual.0:RAID.SL.3-1"],
+ "target": []}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "idrac_redfish_storage_controller.check_id_exists", return_value=None)
+ redfish_response_mock.json_data = {"Links": {"Drives": [{"@odata.id": "Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Integrated.1-1"}]}}
+ with pytest.raises(Exception) as ex:
+ self.module.online_capacity_expansion(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Provided list of targets is empty."
+
+ param.update({"volume_id": [], "target": None, "size": 3500})
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as ex:
+ self.module.online_capacity_expansion(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "The Fully Qualified Device Descriptor (FQDD) of the target virtual drive must be only one."
+
+ def test_online_capacity_expansion_valid_target(self, redfish_str_controller_conn, redfish_response_mock, mocker):
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
+ "command": "OnlineCapacityExpansion",
+ "volume_id": "Disk.Virtual.0:RAID.SL.3-1",
+ "target": ["Disk.Bay.2:Enclosure.Internal.0-0:RAID.Integrated.1-1",
+ "Disk.Bay.3:Enclosure.Internal.0-0:RAID.Integrated.1-1",
+ "Disk.Bay.4:Enclosure.Internal.0-0:RAID.Integrated.1-1"]}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "idrac_redfish_storage_controller.check_id_exists", return_value=None)
+ redfish_response_mock.json_data = {"Links": {"Drives": [{"@odata.id": "/Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Integrated.1-1"}]},
+ "RAIDType": "RAID0"}
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+ f_module.check_mode = True
+ with pytest.raises(Exception) as ex:
+ self.module.online_capacity_expansion(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Changes found to be applied."
+
+ f_module.check_mode = False
+ result = self.module.online_capacity_expansion(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+
+ param.update({"target": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Integrated.1-1"]})
+ with pytest.raises(Exception) as ex:
+ self.module.online_capacity_expansion(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "No changes found to be applied."
+
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.json_data = {"RAIDType": "RAID10"}
+ with pytest.raises(Exception) as ex:
+ self.module.online_capacity_expansion(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "No changes found to be applied."
+
+ def test_online_capacity_expansion_size(self, redfish_str_controller_conn, redfish_response_mock, mocker):
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
+ "command": "OnlineCapacityExpansion",
+ "volume_id": ["Disk.Virtual.0:RAID.SL.3-1"],
+ "size": 3010}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "idrac_redfish_storage_controller.check_id_exists", return_value=None)
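+ # 3145728000 bytes is 3000 MiB, so a requested size of 3010 MB is within the 100 MB minimum increase and should be rejected.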
+ redfish_response_mock.json_data = {"CapacityBytes": 3145728000}
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+ with pytest.raises(Exception) as ex:
+ self.module.online_capacity_expansion(f_module, redfish_str_controller_conn)
+ assert ex.value.args[0] == "Minimum Online Capacity Expansion size must be greater than 100 MB of the current size 3000."
+
+ param.update({"size": 3500})
+ result = self.module.online_capacity_expansion(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+
+ param.update({"size": None})
+ result = self.module.online_capacity_expansion(f_module, redfish_str_controller_conn)
+ assert result[2] == "JID_XXXXXXXXXXXXX"
+
+ def test_get_current_time(self, redfish_str_controller_conn, redfish_response_mock):
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"DateTime": "2023-01-09T01:23:40-06:00", "DateTimeLocalOffset": "-06:00"}
+ resp = self.module.get_current_time(redfish_str_controller_conn)
+ assert resp[0] == "2023-01-09T01:23:40-06:00"
+ assert resp[1] == "-06:00"
+
+ json_str = to_text(json.dumps({"data": "out"}))
+ redfish_str_controller_conn.invoke_request.side_effect = HTTPError(
+ HTTPS_ADDRESS, 400,
+ HTTP_ERROR_MSG,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ resp = self.module.get_current_time(redfish_str_controller_conn)
+ assert resp[0] is None
+ assert resp[1] is None
+
+ def test_validate_time(self, redfish_str_controller_conn, redfish_response_mock, redfish_default_args):
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
+ "controller_id": "RAID.Integrated.1-1",
+ "attributes": {"ControllerMode": "RAID", "CheckConsistencyMode": "Normal"},
+ "job_wait": True, "apply_time": "InMaintenanceWindowOnReset",
+ "maintenance_window": {"start_time": "2023-09-30T05:15:40-06:00", "duration": 900}}
+ redfish_default_args.update(param)
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"DateTime": "2023-01-09T01:23:40-06:00", "DateTimeLocalOffset": "-06:00"}
+ with pytest.raises(Exception):
+ result = self.module.validate_time(f_module, redfish_str_controller_conn, "2023-01-09T01:23:40-05:00")
+ assert result["msg"] == "The maintenance time must be post-fixed with local offset to -05:00."
+
+ redfish_response_mock.json_data = {"DateTime": "2023-01-09T01:23:40-06:00", "DateTimeLocalOffset": "-06:00"}
+ with pytest.raises(Exception):
+ result = self.module.validate_time(f_module, redfish_str_controller_conn, "2022-01-09T01:23:40-05:00")
+ assert result["msg"] == "The specified maintenance time window occurs in the past, provide a future time" \
+ " to schedule the maintenance window."
+
+ redfish_response_mock.json_data = {"DateTime": "2023-10-09T01:23:40+06:00", "DateTimeLocalOffset": "+06:00"}
+ with pytest.raises(Exception):
+ result = self.module.validate_time(f_module, redfish_str_controller_conn, "2023-09-09T01:23:40+06:00")
+ assert result["msg"] == "The specified maintenance time window occurs in the past, provide a future time" \
+ " to schedule the maintenance window."
+
+ def test_check_attr_exists(self, redfish_str_controller_conn, redfish_response_mock):
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
+ "controller_id": "RAID.Integrated.1-1",
+ "attributes": {"ControllerMode": "RAID", "CheckConsistencyMode": "Normal"},
+ "job_wait": True, "apply_time": "InMaintenanceWindowOnReset",
+ "maintenance_window": {"start_time": "2023-09-30T05:15:40-06:00", "duration": 900}}
+ curr_attr = {"ControllerMode": "RAID", "CheckConsistencyMode": "StopOnError", "LoadBalanceMode": "Automatic"}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.status_code = 200
+ result = self.module.check_attr_exists(f_module, curr_attr, param["attributes"])
+ assert result["CheckConsistencyMode"] == "Normal"
+ f_module.check_mode = True
+ with pytest.raises(Exception) as ex:
+ self.module.check_attr_exists(f_module, curr_attr, param["attributes"])
+ assert ex.value.args[0] == "Changes found to be applied."
+ f_module.check_mode = False
+ with pytest.raises(Exception) as ex:
+ self.module.check_attr_exists(f_module, curr_attr, {"ControllerMode": "RAID",
+ "CheckConsistencyMode": "StopOnError"})
+ assert ex.value.args[0] == "No changes found to be applied."
+ f_module.check_mode = False
+ with pytest.raises(Exception) as ex:
+ self.module.check_attr_exists(f_module, curr_attr, {"ControllerMode": "RAID",
+ "CheckConsistency": "StopOnError"})
+ assert ex.value.args[0] == "The following attributes are invalid: ['CheckConsistency']"
+
+ def test_get_attributes(self, redfish_str_controller_conn, redfish_response_mock):
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
+ "controller_id": "RAID.Integrated.1-1",
+ "attributes": {"ControllerMode": "RAID", "CheckConsistencyMode": "Normal"},
+ "job_wait": True, "apply_time": "InMaintenanceWindowOnReset",
+ "maintenance_window": {"start_time": "2023-09-30T05:15:40-06:00", "duration": 900}}
+ resp = {"@Redfish.Settings": {"SupportedApplyTimes": ["Immediate", "OnReset", "AtMaintenanceWindowStart",
+ "InMaintenanceWindowOnReset"]},
+ "Id": "RAID.Integrated.1-1",
+ "Oem": {
+ "Dell": {
+ "DellStorageController": {
+ "AlarmState": "AlarmNotPresent",
+ "AutoConfigBehavior": "NotApplicable",
+ "BackgroundInitializationRatePercent": 30,
+ "BatteryLearnMode": "null",
+ "BootVirtualDiskFQDD": "null",
+ "CacheSizeInMB": 2048,
+ "CachecadeCapability": "NotSupported",
+ "CheckConsistencyMode": "StopOnError",
+ "ConnectorCount": 2,
+ "ControllerBootMode": "ContinueBootOnError",
+ "ControllerFirmwareVersion": "25.5.9.0001",
+ "ControllerMode": "RAID",
+ "CopybackMode": "OnWithSMART",
+ "CurrentControllerMode": "RAID",
+ "Device": "0",
+ "DeviceCardDataBusWidth": "Unknown",
+ "DeviceCardSlotLength": "Unknown",
+ "DeviceCardSlotType": "Unknown",
+ "DriverVersion": "6.706.06.00",
+ "EncryptionCapability": "LocalKeyManagementCapable",
+ "EncryptionMode": "LocalKeyManagement",
+ "EnhancedAutoImportForeignConfigurationMode": "Disabled",
+ "KeyID": "MyNewKey@123",
+ "LastSystemInventoryTime": "2022-12-23T04:59:41+00:00",
+ "LastUpdateTime": "2022-12-23T17:59:44+00:00",
+ "LoadBalanceMode": "Automatic",
+ "MaxAvailablePCILinkSpeed": "Generation 3",
+ "MaxDrivesInSpanCount": 32,
+ "MaxPossiblePCILinkSpeed": "Generation 3",
+ "MaxSpansInVolumeCount": 8,
+ "MaxSupportedVolumesCount": 64,
+ "PCISlot": "null",
+ "PatrolReadIterationsCount": 0,
+ "PatrolReadMode": "Automatic",
+ "PatrolReadRatePercent": 30,
+ "PatrolReadState": "Stopped",
+ "PatrolReadUnconfiguredAreaMode": "Enabled",
+ "PersistentHotspare": "Disabled",
+ "PersistentHotspareMode": "Disabled",
+ "RAIDMode": "None",
+ "RealtimeCapability": "Capable",
+ "ReconstructRatePercent": 30,
+ "RollupStatus": "OK",
+ "SASAddress": "54CD98F0760C3D00",
+ "SecurityStatus": "SecurityKeyAssigned",
+ "SharedSlotAssignmentAllowed": "NotApplicable",
+ "SlicedVDCapability": "Supported",
+ "SpindownIdleTimeSeconds": 30,
+ "SupportControllerBootMode": "Supported",
+ "SupportEnhancedAutoForeignImport": "Supported",
+ "SupportRAID10UnevenSpans": "Supported",
+ "SupportedInitializationTypes": [
+ "Slow",
+ "Fast"],
+ "SupportedInitializationTypes@odata.count": 2,
+ "SupportsLKMtoSEKMTransition": "No",
+ "T10PICapability": "NotSupported"
+ }
+ }}}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = resp
+ result = self.module.get_attributes(f_module, redfish_str_controller_conn)
+ assert result == resp
+
+ json_str = to_text(json.dumps({"data": "out"}))
+ redfish_str_controller_conn.invoke_request.side_effect = HTTPError(
+ HTTPS_ADDRESS, 400,
+ HTTP_ERROR_MSG,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ resp = self.module.get_attributes(f_module, redfish_str_controller_conn)
+ assert resp == {}
+
+ def test_get_redfish_apply_time(self, redfish_str_controller_conn, redfish_response_mock):
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
+ "controller_id": "RAID.Integrated.1-1",
+ "attributes": {"ControllerMode": "RAID", "CheckConsistencyMode": "Normal"},
+ "job_wait": True, "apply_time": "InMaintenanceWindowOnReset",
+ "maintenance_window": {"start_time": "2023-09-30T05:15:40-06:00", "duration": 900}}
+ time_settings = ["Immediate", "OnReset", "AtMaintenanceWindowStart", "InMaintenanceWindowOnReset"]
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"DateTime": "2023-01-09T01:23:40-06:00", "DateTimeLocalOffset": "-06:00"}
+ result = self.module.get_redfish_apply_time(f_module, redfish_str_controller_conn, param["apply_time"],
+ time_settings)
+ assert result['ApplyTime'] == param['apply_time']
+ assert result['MaintenanceWindowDurationInSeconds'] == 900
+ assert result['MaintenanceWindowStartTime'] == '2023-09-30T05:15:40-06:00'
+
+ param1 = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
+ "controller_id": "RAID.Integrated.1-1",
+ "attributes": {"ControllerMode": "RAID", "CheckConsistencyMode": "Normal"},
+ "job_wait": True, "apply_time": "InMaintenanceWindowOnReset",
+ "maintenance_window": {"start_time": "2023-09-30T05:15:40-06:00", "duration": 900}}
+ f_module = self.get_module_mock(params=param1)
+ redfish_response_mock.json_data = {"DateTime": "2023-01-09T01:23:40-06:00", "DateTimeLocalOffset": "-06:00"}
+ result = self.module.get_redfish_apply_time(f_module, redfish_str_controller_conn, param1["apply_time"],
+ time_settings)
+ assert result['ApplyTime'] == param1['apply_time']
+
+ result = self.module.get_redfish_apply_time(f_module, redfish_str_controller_conn,
+ param1["apply_time"], [])
+ assert result == {}
+
+ with pytest.raises(Exception):
+ result = self.module.get_redfish_apply_time(f_module, redfish_str_controller_conn,
+ param1["apply_time"], ['NotEmpty'])
+ assert result["status_msg"] == "Apply time InMaintenanceWindowOnReset is not supported."
+
+ def test_apply_attributes(self, redfish_str_controller_conn, redfish_response_mock):
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
+ "controller_id": "RAID.Integrated.1-1",
+ "attributes": {"ControllerMode": "RAID", "CheckConsistencyMode": "Normal"},
+ "job_wait": True, "apply_time": "Immediate"}
+ time_settings = ["Immediate", "OnReset", "AtMaintenanceWindowStart", "InMaintenanceWindowOnReset"]
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"DateTime": "2023-01-09T01:23:40-06:00", "DateTimeLocalOffset": "-06:00"}
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+ job_id, time_set = self.module.apply_attributes(f_module, redfish_str_controller_conn,
+ {"CheckConsistencyMode": "StopOnError"},
+ time_settings)
+ assert job_id == "JID_XXXXXXXXXXXXX"
+ assert time_set == {'ApplyTime': "Immediate"}
+
+ redfish_response_mock.status_code = 202
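+ # Mock an @Message.ExtendedInfo error payload (PropertyValueNotInList for PatrolReadMode) so apply_attributes reports a configuration failure.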
+ redfish_response_mock.json_data = {"error": {"@Message.ExtendedInfo": [
+ {"Message": "The value 'abcd' for the property PatrolReadMode is not in the list of acceptable values.",
+ "MessageArgs": ["abcd", "PatrolReadMode"], "MessageArgs@odata.count": 2,
+ "MessageId": "Base.1.12.PropertyValueNotInList",
+ "RelatedProperties": ["#/Oem/Dell/DellStorageController/PatrolReadMode"],
+ "RelatedProperties@odata.count": 1,
+ "Resolution": "Choose a value from the enumeration list that the implementation can support and"
+ "resubmit the request if the operation failed.", "Severity": "Warning"}
+ ]}}
+ with pytest.raises(Exception) as ex:
+ self.module.apply_attributes(f_module, redfish_str_controller_conn, {"CheckConsistencyMode": "StopOnError"},
+ time_settings)
+ assert ex.value.args[0] == "Unable to configure the controller attribute(s) settings."
+
+ time_settings = []
+ with pytest.raises(Exception) as ex:
+ job_id, time_set = self.module.apply_attributes(f_module, redfish_str_controller_conn,
+ {"CheckConsistencyMode": "StopOnError"},
+ time_settings)
+ assert job_id == "JID_XXXXXXXXXXXXX"
+ assert time_set == {}
+
+ json_str = to_text(json.dumps({"data": "out"}))
+ redfish_str_controller_conn.invoke_request.side_effect = HTTPError(
+ HTTPS_ADDRESS, 400,
+ HTTP_ERROR_MSG,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ with pytest.raises(Exception) as ex:
+ self.module.apply_attributes(f_module, redfish_str_controller_conn, {"CheckConsistencyMode": "StopOnError"},
+ time_settings)
+ assert ex.value.args[0] == "Unable to configure the controller attribute(s) settings."
+
+ def test_set_attributes(self, redfish_str_controller_conn, redfish_response_mock):
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
+ "controller_id": "RAID.Integrated.1-1", "attributes": {"ControllerMode": "HBA"},
+ "job_wait": True, "apply_time": "Immediate"}
+ resp = {"@Redfish.Settings": {"SupportedApplyTimes": ["Immediate", "OnReset", "AtMaintenanceWindowStart",
+ "InMaintenanceWindowOnReset"]},
+ "Id": "RAID.Integrated.1-1",
+ "Oem": {
+ "Dell": {
+ "DellStorageController": {
+ "AlarmState": "AlarmNotPresent",
+ "AutoConfigBehavior": "NotApplicable",
+ "BackgroundInitializationRatePercent": 30,
+ "BatteryLearnMode": "null",
+ "BootVirtualDiskFQDD": "null",
+ "CacheSizeInMB": 2048,
+ "CachecadeCapability": "NotSupported",
+ "CheckConsistencyMode": "StopOnError",
+ "ConnectorCount": 2,
+ "ControllerBootMode": "ContinueBootOnError",
+ "ControllerFirmwareVersion": "25.5.9.0001",
+ "ControllerMode": "RAID",
+ "CopybackMode": "OnWithSMART",
+ "CurrentControllerMode": "RAID",
+ "Device": "0",
+ "DeviceCardDataBusWidth": "Unknown",
+ "DeviceCardSlotLength": "Unknown",
+ "DeviceCardSlotType": "Unknown",
+ "DriverVersion": "6.706.06.00",
+ "EncryptionCapability": "LocalKeyManagementCapable",
+ "EncryptionMode": "LocalKeyManagement",
+ "EnhancedAutoImportForeignConfigurationMode": "Disabled",
+ "KeyID": "MyNewKey@123",
+ "LastSystemInventoryTime": "2022-12-23T04:59:41+00:00",
+ "LastUpdateTime": "2022-12-23T17:59:44+00:00",
+ "LoadBalanceMode": "Automatic",
+ "MaxAvailablePCILinkSpeed": "Generation 3",
+ "MaxDrivesInSpanCount": 32,
+ "MaxPossiblePCILinkSpeed": "Generation 3",
+ "MaxSpansInVolumeCount": 8,
+ "MaxSupportedVolumesCount": 64,
+ "PCISlot": "null",
+ "PatrolReadIterationsCount": 0,
+ "PatrolReadMode": "Automatic",
+ "PatrolReadRatePercent": 30,
+ "PatrolReadState": "Stopped",
+ "PatrolReadUnconfiguredAreaMode": "Enabled",
+ "PersistentHotspare": "Disabled",
+ "PersistentHotspareMode": "Disabled",
+ "RAIDMode": "None",
+ "RealtimeCapability": "Capable",
+ "ReconstructRatePercent": 30,
+ "RollupStatus": "OK",
+ "SASAddress": "54CD98F0760C3D00",
+ "SecurityStatus": "SecurityKeyAssigned",
+ "SharedSlotAssignmentAllowed": "NotApplicable",
+ "SlicedVDCapability": "Supported",
+ "SpindownIdleTimeSeconds": 30,
+ "SupportControllerBootMode": "Supported",
+ "SupportEnhancedAutoForeignImport": "Supported",
+ "SupportRAID10UnevenSpans": "Supported",
+ "SupportedInitializationTypes": [
+ "Slow",
+ "Fast"
+ ],
+ "SupportedInitializationTypes@odata.count": 2,
+ "SupportsLKMtoSEKMTransition": "No",
+ "T10PICapability": "NotSupported"
+ }
+ }}}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = resp
+ redfish_response_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/JID_XXXXXXXXXXXXX"}
+ job_id, time_set = self.module.set_attributes(f_module, redfish_str_controller_conn)
+ assert job_id == "JID_XXXXXXXXXXXXX"
+ assert time_set == {'ApplyTime': "Immediate"}
+
+ param.update({"attributes": {"ControllerMode": "HBA", 'RandomKey': 123}})
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception):
+ result = self.module.set_attributes(f_module, redfish_str_controller_conn)
+ assert result['msg'] == "Other attributes cannot be updated when ControllerMode is provided as input."
+
+ def test_main_success_attributes(self, redfish_str_controller_conn, redfish_response_mock, redfish_default_args, mocker):
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
+ "controller_id": None,
+ "attributes": {"ControllerMode": "RAID", "CheckConsistencyMode": "Normal"},
+ "job_wait": True, "apply_time": "Immediate"}
+ resp = {"@Redfish.Settings": {"SupportedApplyTimes": ["Immediate", "OnReset", "AtMaintenanceWindowStart",
+ "InMaintenanceWindowOnReset"]},
+ "Id": "RAID.Integrated.1-1",
+ "Oem": {
+ "Dell": {
+ "DellStorageController": {
+ "AlarmState": "AlarmNotPresent",
+ "AutoConfigBehavior": "NotApplicable",
+ "BackgroundInitializationRatePercent": 30,
+ "BatteryLearnMode": "null",
+ "BootVirtualDiskFQDD": "null",
+ "CacheSizeInMB": 2048,
+ "CachecadeCapability": "NotSupported",
+ "CheckConsistencyMode": "StopOnError",
+ "ConnectorCount": 2,
+ "ControllerBootMode": "ContinueBootOnError",
+ "ControllerFirmwareVersion": "25.5.9.0001",
+ "ControllerMode": "RAID",
+ "CopybackMode": "OnWithSMART",
+ "CurrentControllerMode": "RAID",
+ "Device": "0",
+ "DeviceCardDataBusWidth": "Unknown",
+ "DeviceCardSlotLength": "Unknown",
+ "DeviceCardSlotType": "Unknown",
+ "DriverVersion": "6.706.06.00",
+ "EncryptionCapability": "LocalKeyManagementCapable",
+ "EncryptionMode": "LocalKeyManagement",
+ "EnhancedAutoImportForeignConfigurationMode": "Disabled",
+ "KeyID": "MyNewKey@123",
+ "LastSystemInventoryTime": "2022-12-23T04:59:41+00:00",
+ "LastUpdateTime": "2022-12-23T17:59:44+00:00",
+ "LoadBalanceMode": "Automatic",
+ "MaxAvailablePCILinkSpeed": "Generation 3",
+ "MaxDrivesInSpanCount": 32,
+ "MaxPossiblePCILinkSpeed": "Generation 3",
+ "MaxSpansInVolumeCount": 8,
+ "MaxSupportedVolumesCount": 64,
+ "PCISlot": "null",
+ "PatrolReadIterationsCount": 0,
+ "PatrolReadMode": "Automatic",
+ "PatrolReadRatePercent": 30,
+ "PatrolReadState": "Stopped",
+ "PatrolReadUnconfiguredAreaMode": "Enabled",
+ "PersistentHotspare": "Disabled",
+ "PersistentHotspareMode": "Disabled",
+ "RAIDMode": "None",
+ "RealtimeCapability": "Capable",
+ "ReconstructRatePercent": 30,
+ "RollupStatus": "OK",
+ "SASAddress": "54CD98F0760C3D00",
+ "SecurityStatus": "SecurityKeyAssigned",
+ "SharedSlotAssignmentAllowed": "NotApplicable",
+ "SlicedVDCapability": "Supported",
+ "SpindownIdleTimeSeconds": 30,
+ "SupportControllerBootMode": "Supported",
+ "SupportEnhancedAutoForeignImport": "Supported",
+ "SupportRAID10UnevenSpans": "Supported",
+ "SupportedInitializationTypes": [
+ "Slow",
+ "Fast"
+ ],
+ "SupportedInitializationTypes@odata.count": 2,
+ "SupportsLKMtoSEKMTransition": "No",
+ "T10PICapability": "NotSupported"
+ }
+ }}}
+ redfish_default_args.update(param)
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.check_id_exists', return_value=None)
+ result = self._run_module(redfish_default_args)
+ assert result['msg'] == "controller_id is required to perform this operation."
+ param.update({"controller_id": "RAID.Integrated.1-1"})
+ param.update({"job_wait": False})
+ redfish_default_args.update(param)
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.check_id_exists', return_value=None)
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.set_attributes',
+ return_value=("JID_XXXXXXXXXXXXX", {'ApplyTime': "Immediate"}))
+ result = self._run_module(redfish_default_args)
+ assert result["task"]["id"] == "JID_XXXXXXXXXXXXX"
+ param.update({"job_wait": True})
+ redfish_default_args.update(param)
+ redfish_response_mock.json_data = {"JobState": "Completed"}
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.check_id_exists', return_value=None)
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.set_attributes',
+ return_value=("JID_XXXXXXXXXXXXX", {'ApplyTime': "Immediate"}))
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.wait_for_job_completion',
+ return_value=(redfish_response_mock, "Success"))
+ result = self._run_module(redfish_default_args)
+ assert result['msg'] == "Successfully applied the controller attributes."
+
+ redfish_response_mock.json_data = {"JobState": "Failed"}
+ result = self._run_module(redfish_default_args)
+ assert result['msg'] == "Successfully applied the controller attributes."
+
@pytest.mark.parametrize("exc_type", [RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError,
- ImportError, ValueError, TypeError])
+ ImportError, ValueError, TypeError, HTTPError])
def test_main_error(self, redfish_str_controller_conn, redfish_response_mock, mocker,
exc_type, redfish_default_args):
- param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
"command": "ResetConfig", "controller_id": "RAID.Integrated.1-1"}
redfish_default_args.update(param)
mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.validate_inputs', return_value=None)
@@ -268,14 +952,14 @@ class TestIdracRedfishStorageController(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.ctrl_reset_config',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type(HTTPS_ADDRESS, 400, HTTP_ERROR_MSG,
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(redfish_default_args)
assert result['failed'] is True
assert 'msg' in result
def test_main_success(self, redfish_str_controller_conn, redfish_response_mock, redfish_default_args, mocker):
- param = {"baseuri": "192.168.0.1", "username": "username", "password": "password",
+ param = {"baseuri": "XX.XX.XX.XX", "username": "username", "password": "password",
"command": "SetControllerKey", "key": "Key@123", "key_id": "keyid@123",
"controller_id": "RAID.Integrated.1-1",
"target": ["Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1"]}
@@ -314,3 +998,19 @@ class TestIdracRedfishStorageController(FakeAnsibleModule):
return_value={"JobState": "Failed"})
result = self._run_module(redfish_default_args)
assert result["task"]["id"] == "JID_XXXXXXXXXXXXX"
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.strip_substr_dict',
+ return_value={"JobState": "Completed"})
+ result = self._run_module(redfish_default_args)
+ assert result["task"]["id"] == "JID_XXXXXXXXXXXXX"
+ param.update({"command": "OnlineCapacityExpansion", "job_wait": True, "volume_id": ['123']})
+ redfish_default_args.update(param)
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.online_capacity_expansion',
+ return_value=("", "", "JID_XXXXXXXXXXXXX"))
+ result = self._run_module(redfish_default_args)
+ assert result["task"]["id"] == "JID_XXXXXXXXXXXXX"
+ param.update({"command": "LockVirtualDisk", "job_wait": True, "volume_id": ['123']})
+ redfish_default_args.update(param)
+ mocker.patch(MODULE_PATH + 'idrac_redfish_storage_controller.lock_virtual_disk',
+ return_value=("", "", "JID_XXXXXXXXXXXXX"))
+ result = self._run_module(redfish_default_args)
+ assert result["task"]["id"] == "JID_XXXXXXXXXXXXX"
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py
index 3f4ca4977..a6fbb1d04 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -14,10 +14,10 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_reset
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from mock import MagicMock, patch, Mock
+from mock import MagicMock, Mock
from io import StringIO
from ansible.module_utils._text import to_text
@@ -85,7 +85,7 @@ class TestReset(FakeAnsibleModule):
mocker.patch(MODULE_PATH + 'idrac_reset.run_idrac_reset', side_effect=exc_type('test'))
else:
mocker.patch(MODULE_PATH + 'idrac_reset.run_idrac_reset',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
if not exc_type == URLError:
result = self._run_module_with_fail_json(idrac_default_args)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_server_config_profile.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_server_config_profile.py
index 16d5b0307..bae1de38e 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_server_config_profile.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_server_config_profile.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.4.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.4.0
+# Copyright (C) 2020-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -12,345 +12,198 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
-import sys
+import mock
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_server_config_profile
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants,\
- AnsibleExitJson
-from mock import MagicMock, patch, Mock, mock_open
-from pytest import importorskip
-from ansible.module_utils.six.moves.urllib.parse import urlparse, ParseResult
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
-
-importorskip("omsdk.sdkfile")
-importorskip("omsdk.sdkcreds")
+SUCCESS_MSG = 'Successfully {0}ed the Server Configuration Profile'
+JOB_SUCCESS_MSG = 'Successfully triggered the job to {0} the Server Configuration Profile'
+PREVIEW_SUCCESS_MSG = 'Successfully previewed the Server Configuration Profile'
+CHANGES_FOUND = "Changes found to be applied."
+NO_CHANGES_FOUND = "No changes found to be applied."
+REDFISH_JOB_TRACKING = "idrac_server_config_profile.idrac_redfish_job_tracking"
class TestServerConfigProfile(FakeAnsibleModule):
module = idrac_server_config_profile
@pytest.fixture
- def idrac_server_configure_profile_mock(self, mocker):
- omsdk_mock = MagicMock()
+ def idrac_server_configure_profile_mock(self):
idrac_obj = MagicMock()
- omsdk_mock.file_share_manager = idrac_obj
- omsdk_mock.config_mgr = idrac_obj
return idrac_obj
@pytest.fixture
- def idrac_file_manager_server_config_profile_mock(self, mocker):
- try:
- file_manager_obj = mocker.patch(
- MODULE_PATH + 'idrac_server_config_profile.file_share_manager')
- except AttributeError:
- file_manager_obj = MagicMock()
- obj = MagicMock()
- file_manager_obj.create_share_obj.return_value = obj
- return file_manager_obj
-
- @pytest.fixture
def idrac_scp_redfish_mock(self, mocker, idrac_server_configure_profile_mock):
idrac_conn_class_mock = mocker.patch(MODULE_PATH + 'idrac_server_config_profile.iDRACRedfishAPI',
return_value=idrac_server_configure_profile_mock)
idrac_conn_class_mock.return_value.__enter__.return_value = idrac_server_configure_profile_mock
return idrac_server_configure_profile_mock
- def test_run_export_import_http(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
- idrac_default_args.update({"share_name": "192.168.0.1:/share", "share_user": "sharename",
- "share_password": "sharepswd", "command": "export",
- "job_wait": True, "scp_components": "IDRAC",
- "scp_file": "scp_file.xml", "end_host_power_state": "On",
- "shutdown_type": "Graceful", "export_format": "XML", "export_use": "Default"})
- f_module = self.get_module_mock(params=idrac_default_args)
- export_response = {"msg": "Successfully exported the Server Configuration Profile.",
- "scp_status": {"Name": "Export: Server Configuration Profile", "PercentComplete": 100,
- "TaskState": "Completed", "TaskStatus": "OK", "Id": "JID_236654661194"}}
- mocker.patch(MODULE_PATH + "idrac_server_config_profile.urlparse",
- return_value=ParseResult(scheme='http', netloc='192.168.0.1',
- path='/share/',
- params='', query='', fragment=''))
- mocker.patch(MODULE_PATH + "idrac_server_config_profile.response_format_change",
- return_value=export_response)
- result = self.module.run_export_import_scp_http(idrac_scp_redfish_mock, f_module)
- assert result["msg"] == "Successfully exported the Server Configuration Profile."
- idrac_default_args.update({"command": "import"})
- f_module = self.get_module_mock(params=idrac_default_args)
- import_response = {"msg": "Successfully imported the Server Configuration Profile.",
- "scp_status": {"Name": "Import: Server Configuration Profile", "PercentComplete": 100,
- "TaskState": "Completed", "TaskStatus": "OK", "Id": "JID_236654661194"}}
- mocker.patch(MODULE_PATH + "idrac_server_config_profile.response_format_change",
- return_value=import_response)
- result = self.module.run_export_import_scp_http(idrac_scp_redfish_mock, f_module)
- assert result["msg"] == "Successfully imported the Server Configuration Profile."
+ @pytest.fixture
+ def idrac_redfish_job_tracking_mock(self, mocker, idrac_server_configure_profile_mock):
+ idrac_conn_class_mock = mocker.patch(MODULE_PATH + REDFISH_JOB_TRACKING,
+ return_value=idrac_server_configure_profile_mock)
+ idrac_conn_class_mock.return_value.__enter__.return_value = idrac_server_configure_profile_mock
+ idrac_conn_class_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/JID_123456789"}
+ return idrac_server_configure_profile_mock
- def test_http_share_msg_main(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
- idrac_default_args.update({"share_name": "http://192.168.0.1:/share", "share_user": "sharename",
- "share_password": "sharepswd", "command": "import",
- "job_wait": False, "scp_components": "IDRAC",
- "scp_file": "scp_file.xml", "end_host_power_state": "On",
- "shutdown_type": "Graceful", "export_format": "XML",
- "export_use": "Default", "validate_certs": False})
- share_return = {"Oem": {"Dell": {"MessageId": "SYS069"}}}
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_import_scp_http',
- return_value=share_return)
- result = self._run_module(idrac_default_args)
- assert result["msg"] == "Successfully triggered the job to import the Server Configuration Profile."
- share_return = {"Oem": {"Dell": {"MessageId": "SYS053"}}}
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_import_scp_http',
- return_value=share_return)
+ @pytest.mark.parametrize("params", [
+ {"message": SUCCESS_MSG.format("export"),
+ "mparams": {"share_name": "\\{SCP SHARE IP}\\share", "job_wait": True,
+ "scp_components": "IDRAC", "scp_file": "scp_file.xml",
+ "proxy_port": 80, "export_format": "XML"}},
+ {"message": SUCCESS_MSG.format("export"),
+ "mparams": {"share_name": "https://{SCP SHARE IP}/myshare/", "proxy_type": "socks4",
+ "proxy_support": True, "job_wait": True, "scp_components": "IDRAC",
+ "proxy_port": 80, "export_format": "JSON", "proxy_server": "PROXY_SERVER_IP",
+ "proxy_username": "proxy_username"}},
+ {"message": JOB_SUCCESS_MSG.format("export"),
+ "mparams": {"share_name": "{SCP SHARE IP}:/nfsshare", "job_wait": False,
+ "scp_components": "IDRAC", "scp_file": "scp_file.txt"}},
+ {"message": JOB_SUCCESS_MSG.format("export"),
+ "mparams": {"share_name": "/share", "job_wait": False,
+ "scp_components": "IDRAC", "scp_file": "scp_file.json"}},
+ ])
+ def test_run_export_scp(self, params, idrac_scp_redfish_mock, idrac_redfish_job_tracking_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_user": "sharename", "command": "export",
+ "export_use": "Default", "include_in_export": "default"})
+ idrac_default_args.update(params['mparams'])
+ mocker.patch("builtins.open", mocker.mock_open())
+ idrac_redfish_job_tracking_mock.status_code = 202
+ idrac_redfish_job_tracking_mock.success = True
+ mocker.patch(MODULE_PATH + REDFISH_JOB_TRACKING,
+ return_value=(False, False, {"Status": "Completed"}, {}))
+ result = self._run_module(idrac_default_args, check_mode=params.get('check_mode', False))
+ assert params['message'] in result['msg']
+
+ @pytest.mark.parametrize("params", [
+ {"message": CHANGES_FOUND,
+ "json_data": {"Id": "JID_932024672685", "Message": SUCCESS_MSG.format("import"), "MessageId": "SYS081",
+ "PercentComplete": 100, "file": "https://{SCP SHARE PATH}/{SCP FILE NAME}.json"},
+ "check_mode": True,
+ "mparams": {"share_name": "{SCP SHARE IP}:/nfsshare", "share_user": "sharename",
+ "job_wait": False, "scp_components": "IDRAC",
+ "scp_file": "scp_file1.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful"}},
+ {"message": NO_CHANGES_FOUND,
+ "json_data": {"Id": "JID_932024672685", "Message": SUCCESS_MSG.format("import"), "MessageId": "SYS069",
+ "PercentComplete": 100, "file": "https://{SCP SHARE PATH}/{SCP FILE NAME}.json"},
+ "check_mode": True,
+ "mparams": {"share_name": "\\{SCP SHARE IP}\\share", "share_user": "sharename",
+ "job_wait": False, "scp_components": "IDRAC",
+ "scp_file": "scp_file1.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful"}},
+ {"message": SUCCESS_MSG.format("import"),
+ "json_data": {"Id": "JID_932024672685", "Message": NO_CHANGES_FOUND, "MessageId": "SYS043",
+ "PercentComplete": 100, "file": "https://{SCP SHARE PATH}/{SCP FILE NAME}.json"},
+ "mparams": {"share_name": "/share", "share_user": "sharename",
+ "job_wait": True, "scp_components": "IDRAC",
+ "scp_file": "scp_file1.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful"}},
+ {"message": SUCCESS_MSG.format("import"),
+ "json_data": {"Id": "JID_932024672685", "Message": SUCCESS_MSG.format("import"), "MessageId": "SYS069",
+ "PercentComplete": 100, "file": "https://{SCP SHARE PATH}/{SCP FILE NAME}.json"},
+ "mparams": {"share_name": "https://{SCP SHARE IP}/share", "share_user": "sharename",
+ "job_wait": True, "scp_components": "IDRAC",
+ "scp_file": "scp_file1.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful"}},
+ {"message": SUCCESS_MSG.format("import"),
+ "json_data": {"Id": "JID_932024672685", "Message": SUCCESS_MSG.format("import"), "MessageId": "SYS053",
+ "PercentComplete": 100, "file": "https://{SCP SHARE PATH}/{SCP FILE NAME}.json"},
+ "mparams": {"share_name": "https://{SCP SHARE IP}/share", "share_user": "sharename",
+ "job_wait": True, "scp_components": "IDRAC",
+ "scp_file": "scp_file1.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful"}},
+ {"message": SUCCESS_MSG.format("import"),
+ "json_data": {"Id": "JID_932024672685", "Message": NO_CHANGES_FOUND, "MessageId": "SYS069",
+ "PercentComplete": 100, "file": "https://{SCP SHARE PATH}/{SCP FILE NAME}.json"},
+ "mparams": {"command": "import", "job_wait": True, "scp_components": "IDRAC",
+ "import_buffer": "<SystemConfiguration><Component FQDD='iDRAC.Embedded.1'><Attribute Name='IPMILan.1#Enable'> \
+ <Value>Disabled</Value></Attribute></Component><Component FQDD='iDRAC.Embedded.1'>"}},
+ ])
+ @mock.patch(MODULE_PATH + "idrac_server_config_profile.exists", return_value=True)
+ def test_run_import_scp(self, mock_exists, params, idrac_scp_redfish_mock, idrac_redfish_job_tracking_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"command": "import"})
+ idrac_default_args.update(params['mparams'])
+ mocker.patch("builtins.open", mocker.mock_open())
+ if params.get('check_mode'):
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.preview_scp_redfish',
+ return_value=params['json_data'])
+ elif params['mparams']['job_wait']:
+ mocker.patch(MODULE_PATH + REDFISH_JOB_TRACKING,
+ return_value=(False, False, {"Status": "Completed"}, {}))
+ else:
+ idrac_scp_redfish_mock.import_scp.return_value = params['json_data']
+ result = self._run_module(idrac_default_args, check_mode=params.get('check_mode', False))
+ assert params['message'] in result['msg']
+
+ @pytest.mark.parametrize("params", [
+ {"message": PREVIEW_SUCCESS_MSG,
+ "check_mode": True,
+ "mparams": {"share_name": "{SCP SHARE IP}:/nfsshare", "share_user": "sharename",
+ "command": "preview", "job_wait": True,
+ "scp_components": "IDRAC", "scp_file": "scp_file4.xml"}},
+ {"message": PREVIEW_SUCCESS_MSG,
+ "mparams": {"share_name": "https://{SCP SHARE IP}/nfsshare", "share_user": "sharename",
+ "command": "preview", "job_wait": True,
+ "scp_components": "IDRAC", "scp_file": "scp_file4.xml"}},
+ ])
+ def test_preview_scp(self, params, idrac_scp_redfish_mock, idrac_redfish_job_tracking_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"command": "preview"})
+ idrac_default_args.update(params['mparams'])
+ mocker.patch(MODULE_PATH + REDFISH_JOB_TRACKING,
+ return_value=(False, False, {"Status": "Completed"}, {}))
+ result = self._run_module(idrac_default_args, check_mode=params.get('check_mode', False))
+ assert params['message'] in result['msg']
+
+ def test_preview_scp_redfish_throws_ex(self, idrac_scp_redfish_mock, idrac_redfish_job_tracking_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "{SCP SHARE IP}:/nfsshare", "share_user": "sharename",
+ "command": "preview", "job_wait": True,
+ "scp_components": "IDRAC", "scp_file": "scp_file5.xml"})
+ idrac_redfish_job_tracking_mock.headers = {"Location": "/redfish/v1/Managers/iDRAC.Embedded.1/JID_123456789"}
+ mocker.patch(MODULE_PATH + 'idrac_server_config_profile.idrac_redfish_job_tracking',
+ return_value=(True, False, {"Status": "Failed"}, {}))
result = self._run_module(idrac_default_args)
- assert result["msg"] == "Successfully triggered the job to import the Server Configuration Profile."
- idrac_default_args.update({"command": "export"})
- share_return = {"Oem": {"Dell": {"MessageId": "SYS043"}}}
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_import_scp_http',
- return_value=share_return)
+ assert result['failed']
+
+ def test_import_scp_http_throws_exception(self, idrac_scp_redfish_mock, idrac_redfish_job_tracking_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"share_name": "https://{SCP SHARE IP}/myshare/", "share_user": "sharename",
+ "command": "import", "job_wait": True, "scp_components": "IDRAC",
+ "scp_file": "scp_file2.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful"})
+ mocker.patch(MODULE_PATH + REDFISH_JOB_TRACKING,
+ return_value=(True, False, {"Status": "Failed"}, {}))
result = self._run_module(idrac_default_args)
- assert result["msg"] == "Successfully triggered the job to export the Server Configuration Profile."
-
- def test_export_scp_redfish(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
- idrac_default_args.update({"share_name": "192.168.0.1:/share", "share_user": "sharename",
- "share_password": "sharepswd", "command": "import",
- "job_wait": False, "scp_components": "IDRAC",
- "scp_file": "scp_file.xml", "end_host_power_state": "On",
- "shutdown_type": "Graceful", "export_format": "XML",
- "export_use": "Default", "validate_certs": False})
- f_module = self.get_module_mock(params=idrac_default_args)
- share_return = {"Oem": {"Dell": {"MessageId": "SYS069"}}}
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_import_scp_http',
- return_value=share_return)
- f_module.check_mode = False
- result = self.module.export_scp_redfish(f_module, idrac_scp_redfish_mock)
- assert result["file"] == "192.168.0.1:/share/scp_file.xml"
- idrac_default_args.update({"share_name": "\\\\100.96.16.123\\cifsshare"})
- result = self.module.export_scp_redfish(f_module, idrac_scp_redfish_mock)
- assert result["file"] == "\\\\100.96.16.123\\cifsshare\\scp_file.xml"
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.response_format_change',
- return_value={"TaskStatus": "Critical"})
- with pytest.raises(Exception) as ex:
- self.module.export_scp_redfish(f_module, idrac_scp_redfish_mock)
- assert ex.value.args[0] == "Failed to import scp."
-
- def test_response_format_change(self, idrac_scp_redfish_mock, idrac_default_args):
- idrac_default_args.update({"share_name": "192.168.0.1:/share", "share_user": "sharename",
- "share_password": "sharepswd", "command": "import",
- "job_wait": True, "scp_components": "IDRAC",
- "scp_file": "scp_file.xml", "end_host_power_state": "On",
- "shutdown_type": "Graceful", "export_format": "XML",
- "export_use": "Default", "validate_certs": False})
- f_module = self.get_module_mock(params=idrac_default_args)
- idrac_scp_redfish_mock.json_data = {"Oem": {"Dell": {"key": "value"}}}
- result = self.module.response_format_change(idrac_scp_redfish_mock, f_module, "export_scp.yml")
- assert result["key"] == "value"
- idrac_default_args.update({"command": "export"})
- f_module = self.get_module_mock(params=idrac_default_args)
- result = self.module.response_format_change(idrac_scp_redfish_mock, f_module, "export_scp.yml")
- assert result["key"] == "value"
-
- def test_preview_scp_redfish(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
- idrac_default_args.update({"share_name": "192.168.0.1:/nfsshare", "share_user": "sharename",
- "share_password": "sharepswd", "command": "preview", "job_wait": True,
- "scp_components": "IDRAC", "scp_file": "scp_file.xml",
- "end_host_power_state": "On", "shutdown_type": "Graceful", "export_format": "XML",
- "export_use": "Default", "validate_certs": False, "idrac_port": 443})
- f_module = self.get_module_mock(params=idrac_default_args)
- share = {"share_ip": "192.168.0.1", "share_user": "sharename", "share_password": "password",
- "job_wait": True}
- f_module.check_mode = False
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.get_scp_share_details',
- return_value=(share, "scp_file.xml"))
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.response_format_change',
- return_value={"Status": "Success"})
- result = self.module.preview_scp_redfish(f_module, idrac_scp_redfish_mock, True, import_job_wait=False)
- assert result["Status"] == "Success"
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.response_format_change',
- return_value={"TaskStatus": "Critical"})
- with pytest.raises(Exception) as ex:
- self.module.import_scp_redfish(f_module, idrac_scp_redfish_mock, True)
- assert ex.value.args[0] == "Failed to preview scp."
- idrac_default_args.update({"share_name": "192.168.0.1:/nfsshare", "share_user": "sharename",
- "share_password": "sharepswd", "command": "preview", "job_wait": True,
- "scp_components": "IDRAC", "scp_file": "scp_file.xml",
- "end_host_power_state": "On", "shutdown_type": "Graceful", "export_format": "XML",
- "export_use": "Default", "validate_certs": False, "idrac_port": 443})
- f_module = self.get_module_mock(params=idrac_default_args)
- f_module.check_mode = False
- share = {"share_ip": "192.168.0.1", "share_user": "sharename", "share_password": "password",
- "job_wait": True, "share_type": "LOCAL", "share_name": "share_name"}
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.get_scp_share_details',
- return_value=(share, "scp_file.xml"))
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.exists',
- return_value=False)
- with pytest.raises(Exception) as ex:
- self.module.import_scp_redfish(f_module, idrac_scp_redfish_mock, False)
- assert ex.value.args[0] == "Invalid file path provided."
-
- def test_import_scp_redfish(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
- idrac_default_args.update({"share_name": "192.168.0.1:/share", "share_user": "sharename",
- "share_password": "sharepswd", "command": "import",
- "job_wait": True, "scp_components": "IDRAC",
- "scp_file": "scp_file.xml", "end_host_power_state": "On",
- "shutdown_type": "Graceful", "export_format": "XML",
- "export_use": "Default", "validate_certs": False, "idrac_port": 443})
- f_module = self.get_module_mock(params=idrac_default_args)
- f_module.check_mode = True
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.preview_scp_redfish',
- return_value={"MessageId": "SYS081"})
- with pytest.raises(Exception) as ex:
- self.module.import_scp_redfish(f_module, idrac_scp_redfish_mock, True)
- assert ex.value.args[0] == "Changes found to be applied."
- idrac_default_args.update({"share_name": "http://192.168.0.1/http-share", "share_user": "sharename",
- "share_password": "sharepswd", "command": "import",
- "job_wait": True, "scp_components": "IDRAC",
- "scp_file": "scp_file.xml", "end_host_power_state": "On",
- "shutdown_type": "Graceful", "export_format": "XML",
- "export_use": "Default", "validate_certs": False, "idrac_port": 443})
- f_module = self.get_module_mock(params=idrac_default_args)
- f_module.check_mode = False
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.response_format_change',
- return_value={"Status": "Success"})
- result = self.module.import_scp_redfish(f_module, idrac_scp_redfish_mock, True)
- assert result["Status"] == "Success"
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.response_format_change',
- return_value={"TaskStatus": "Critical"})
- with pytest.raises(Exception) as ex:
- self.module.import_scp_redfish(f_module, idrac_scp_redfish_mock, True)
- assert ex.value.args[0] == "Failed to import scp."
- idrac_default_args.update({"share_name": "local-share", "share_user": "sharename",
- "share_password": "sharepswd", "command": "import",
- "job_wait": True, "scp_components": "IDRAC",
- "scp_file": "scp_file.xml", "end_host_power_state": "On",
- "shutdown_type": "Graceful", "export_format": "XML",
- "export_use": "Default", "validate_certs": False, "idrac_port": 443})
- f_module = self.get_module_mock(params=idrac_default_args)
- f_module.check_mode = False
- share = {"share_ip": "192.168.0.1", "share_user": "sharename", "share_password": "password",
- "job_wait": True, "share_type": "LOCAL", "share_name": "share_name"}
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.get_scp_share_details',
- return_value=(share, "scp_file.xml"))
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.exists',
- return_value=False)
+ assert result['failed']
+
+ @pytest.mark.parametrize("params", [
+ {"message": "Invalid file path provided.",
+ "mparams": {"share_name": "/share/", "share_user": "sharename",
+ "command": "import", "job_wait": False, "scp_components": "IDRAC",
+ "scp_file": "scp_file3.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful"}},
+ {"message": "proxy_support is True but all of the following are missing: proxy_server",
+ "mparams": {"share_name": "https://{SCP SHARE IP}/myshare/", "proxy_type": "http",
+ "proxy_support": True, "job_wait": True, "scp_components": "IDRAC",
+ "proxy_port": 80, "export_format": "JSON",
+ "proxy_username": "proxy_username"}},
+ {"message": "import_buffer is mutually exclusive with share_name",
+ "mparams": {"share_name": "{SCP SHARE IP}:/nfsshare", "command": "preview", "job_wait": False,
+ "import_buffer": "<SystemConfiguration><Component FQDD='iDRAC.Embedded.1'><Attribute Name='IPMILan.1#Enable'> \
+ <Value>Disabled</Value></Attribute></Component><Component FQDD='iDRAC.Embedded.1'>"}},
+ {"message": "import_buffer is mutually exclusive with scp_file",
+ "mparams": {"scp_file": "example.json", "job_wait": False, "command": "import",
+ "import_buffer": "<SystemConfiguration><Component FQDD='iDRAC.Embedded.1'><Attribute Name='IPMILan.1#Enable'> \
+ <Value>Disabled</Value></Attribute></Component><Component FQDD='iDRAC.Embedded.1'>"}},
+ {"message": "The option ALL cannot be used with options IDRAC, BIOS, NIC, or RAID.",
+ "mparams": {"share_name": "https://{SCP SHARE IP}/myshare/", "share_user": "sharename",
+ "command": "import", "job_wait": True, "scp_components": ["IDRAC", "ALL"],
+ "scp_file": "scp_file2.xml", "end_host_power_state": "On",
+ "shutdown_type": "Graceful"}},
+ ])
+ def test_scp_invalid(self, params, idrac_scp_redfish_mock, idrac_default_args):
+ idrac_default_args.update(params['mparams'])
with pytest.raises(Exception) as ex:
- self.module.import_scp_redfish(f_module, idrac_scp_redfish_mock, False)
- assert ex.value.args[0] == "Invalid file path provided."
-
- def test_get_scp_file_format(self, idrac_scp_redfish_mock, idrac_default_args):
- idrac_default_args.update({"share_name": "192.168.0.1:/share", "share_user": "sharename",
- "share_password": "sharepswd", "command": "import",
- "job_wait": True, "scp_components": "IDRAC",
- "scp_file": "scp_file.xml", "end_host_power_state": "On",
- "shutdown_type": "Graceful", "export_format": "XML",
- "export_use": "Default", "validate_certs": False, "idrac_port": 443})
- f_module = self.get_module_mock(params=idrac_default_args)
- result = self.module.get_scp_file_format(f_module)
- assert result == "scp_file.xml"
- idrac_default_args.update({"scp_file": None})
- f_module = self.get_module_mock(params=idrac_default_args)
- result = self.module.get_scp_file_format(f_module)
- assert result.startswith("idrac_ip_") is True
-
- def test_main_success_case(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
- idrac_default_args.update({"share_name": "http://192.168.0.1/http-share", "share_user": "sharename",
- "share_password": "sharepswd", "command": "import",
- "job_wait": True, "scp_components": "IDRAC",
- "scp_file": "scp_file.xml", "end_host_power_state": "On",
- "shutdown_type": "Graceful", "export_format": "XML",
- "export_use": "Default", "validate_certs": False, "idrac_port": 443})
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_import_scp_http',
- return_value={"MessageId": "SYS069"})
- result = self._run_module(idrac_default_args)
- assert result["scp_status"] == {'MessageId': 'SYS069'}
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.run_export_import_scp_http',
- return_value={"MessageId": "SYS053"})
- result = self._run_module(idrac_default_args)
- assert result["scp_status"] == {'MessageId': 'SYS053'}
- idrac_default_args.update({"share_name": "192.168.0.1:/nfsshare"})
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.import_scp_redfish',
- return_value={"Message": "No changes were applied since the current component configuration "
- "matched the requested configuration"})
- result = self._run_module(idrac_default_args)
- assert result["changed"] is False
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.import_scp_redfish',
- return_value={"MessageId": "SYS043"})
- result = self._run_module(idrac_default_args)
- assert result["scp_status"] == {'MessageId': 'SYS043'}
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.import_scp_redfish',
- return_value={"MessageId": "SYS069"})
- result = self._run_module(idrac_default_args)
- assert result["scp_status"] == {'MessageId': 'SYS069'}
- idrac_default_args.update({"command": "export"})
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.export_scp_redfish',
- return_value={"Status": "Success"})
- result = self._run_module(idrac_default_args)
- assert result["scp_status"] == {'Status': 'Success'}
- idrac_default_args.update({"command": "preview"})
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.preview_scp_redfish',
- return_value={"MessageId": "SYS081"})
- result = self._run_module(idrac_default_args)
- assert result["scp_status"] == {"MessageId": "SYS081"}
-
- def test_get_scp_share_details(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
- idrac_default_args.update({"share_name": "/local-share", "share_user": "sharename",
- "share_password": "sharepswd", "command": "export",
- "job_wait": True, "scp_components": "IDRAC",
- "scp_file": "scp_file.xml", "end_host_power_state": "On",
- "shutdown_type": "Graceful", "export_format": "XML",
- "export_use": "Default", "validate_certs": False, "idrac_port": 443})
- f_module = self.get_module_mock(params=idrac_default_args)
- mocker.patch(MODULE_PATH + 'idrac_server_config_profile.get_scp_file_format',
- return_value="export_scp.xml")
- result = self.module.get_scp_share_details(f_module)
- assert result[1] == "export_scp.xml"
-
- def test_wait_for_response(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
- idrac_default_args.update({"share_name": "/local-share", "share_user": "sharename",
- "share_password": "sharepswd", "command": "export",
- "job_wait": False, "scp_components": "IDRAC",
- "scp_file": "scp_file.xml", "end_host_power_state": "On",
- "shutdown_type": "Graceful", "export_format": "XML",
- "export_use": "Default", "validate_certs": False, "idrac_port": 443})
- f_module = self.get_module_mock(params=idrac_default_args)
- idrac_scp_redfish_mock.headers = {"Location": "/redfish/v1/TaskService/Tasks/JID_123456789"}
- resp_return_value = {"return_data": b"<SystemConfiguration Model='PowerEdge MX840c'>"
- b"<Component FQDD='System.Embedded.1'>"
- b"<Attribute Name='Backplane.1#BackplaneSplitMode'>0</Attribute>"
- b"</Component> </SystemConfiguration>",
- "return_job": {"JobState": "Completed", "JobType": "ExportConfiguration",
- "PercentComplete": 100, "Status": "Success"}}
- idrac_scp_redfish_mock.wait_for_job_complete.return_value = resp_return_value["return_data"]
- idrac_scp_redfish_mock.job_resp = resp_return_value["return_job"]
- share = {"share_name": "/local_share", "file_name": "export_file.xml"}
- if sys.version_info.major == 3:
- builtin_module_name = 'builtins'
- else:
- builtin_module_name = '__builtin__'
- with patch("{0}.open".format(builtin_module_name), mock_open(read_data=resp_return_value["return_data"])) as mock_file:
- result = self.module.wait_for_response(idrac_scp_redfish_mock, f_module, share, idrac_scp_redfish_mock)
- assert result.job_resp == resp_return_value["return_job"]
-
- def test_wait_for_response_json(self, idrac_scp_redfish_mock, idrac_default_args, mocker):
- idrac_default_args.update({"share_name": "/local-share", "share_user": "sharename",
- "share_password": "sharepswd", "command": "export",
- "job_wait": False, "scp_components": "IDRAC",
- "scp_file": "scp_file.xml", "end_host_power_state": "On",
- "shutdown_type": "Graceful", "export_format": "JSON",
- "export_use": "Default", "validate_certs": False, "idrac_port": 443})
- f_module = self.get_module_mock(params=idrac_default_args)
- resp_return_value = {"return_data": {
- "SystemConfiguration": {"Components": [
- {"FQDD": "SupportAssist.Embedded.1",
- "Attributes": [{"Name": "SupportAssist.1#SupportAssistEULAAccepted"}]
- }]}
- },
- "return_job": {"JobState": "Completed", "JobType": "ExportConfiguration",
- "PercentComplete": 100, "Status": "Success"}}
- mock_scp_json_data = idrac_scp_redfish_mock
- mock_scp_json_data.json_data = resp_return_value["return_data"]
- idrac_scp_redfish_mock.wait_for_job_complete.return_value = mock_scp_json_data
- idrac_scp_redfish_mock.job_resp = resp_return_value["return_job"]
- share = {"share_name": "/local_share", "file_name": "export_file.xml"}
- if sys.version_info.major == 3:
- builtin_module_name = 'builtins'
- else:
- builtin_module_name = '__builtin__'
- with patch("{0}.open".format(builtin_module_name), mock_open(read_data=str(resp_return_value["return_data"]))) as mock_file:
- result = self.module.wait_for_response(idrac_scp_redfish_mock, f_module, share, idrac_scp_redfish_mock)
- assert result.job_resp == resp_return_value["return_job"]
+ self._run_module(idrac_default_args)
+ assert params['message'] in ex.value.args[0]['msg']
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_syslog.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_syslog.py
index ae89c2808..a0cf954b9 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_syslog.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_syslog.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
-# Copyright (C) 2018-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2018-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -17,8 +17,8 @@ import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_syslog
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock
from io import StringIO
from ansible.module_utils._text import to_text
from pytest import importorskip
@@ -73,6 +73,17 @@ class TestSetupSyslog(FakeAnsibleModule):
'msg': {'Status': 'Success', 'message': 'No changes found to commit!'}},
'changed': False}
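+    # Parametrized success case: both "No changes" messages map to the module's success response.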
+ @pytest.mark.parametrize("mock_message", [{"Status": "Success", "Message": "No changes found to commit!"},
+ {"Status": "Success", "Message": "No changes found"}])
+ def test_main_setup_syslog_success_case01_extra(self, mock_message, idrac_connection_setup_syslog_mock, idrac_default_args, mocker,
+ idrac_file_manager_mock):
+ idrac_default_args.update({"share_name": "sharename", 'share_password': None, "syslog": "Enabled",
+ 'share_mnt': None, 'share_user': None})
+ mocker.patch(
+ MODULE_PATH + 'idrac_syslog.run_setup_idrac_syslog', return_value=mock_message)
+ result = self._run_module(idrac_default_args)
+ assert result['msg'] == "Successfully fetch the syslogs."
+
def test_run_setup_idrac_syslog_success_case01(self, idrac_connection_setup_syslog_mock, idrac_default_args,
idrac_file_manager_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
@@ -187,7 +198,7 @@ class TestSetupSyslog(FakeAnsibleModule):
else:
mocker.patch(MODULE_PATH +
'idrac_syslog.run_setup_idrac_syslog',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
if not exc_type == URLError:
result = self._run_module_with_fail_json(idrac_default_args)
@@ -195,3 +206,63 @@ class TestSetupSyslog(FakeAnsibleModule):
else:
result = self._run_module(idrac_default_args)
assert 'msg' in result
+
+ def test_run_setup_idrac_syslog_invalid_share(self, idrac_connection_setup_syslog_mock, idrac_default_args,
+ idrac_file_manager_mock, mocker):
+ idrac_default_args.update(
+ {"share_name": "dummy_share_name", "share_mnt": "mountname", "share_user": "shareuser",
+ "syslog": "Disabled", "share_password": "sharepassword"})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ obj = MagicMock()
+ obj.IsValid = True
+
+ mocker.patch(
+ MODULE_PATH + "idrac_syslog.file_share_manager.create_share_obj", return_value=(obj))
+ message = {"changes_applicable": True, "message": "changes found to commit!", "changed": True,
+ "Status": "Success"}
+ idrac_connection_setup_syslog_mock.config_mgr.disable_syslog.return_value = message
+ msg = self.module.run_setup_idrac_syslog(
+ idrac_connection_setup_syslog_mock, f_module)
+ assert msg == {'changes_applicable': True,
+ 'message': 'changes found to commit!', 'changed': True, 'Status': 'Success'}
+
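+        # An invalid share object must raise the share-access error.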
+ obj.IsValid = False
+ mocker.patch(
+ MODULE_PATH + "idrac_syslog.file_share_manager.create_share_obj", return_value=(obj))
+ with pytest.raises(Exception) as exc:
+ self.module.run_setup_idrac_syslog(
+ idrac_connection_setup_syslog_mock, f_module)
+ assert exc.value.args[0] == "Unable to access the share. Ensure that the share name, share mount, and share credentials provided are correct."
+
+ def test_run_setup_idrac_syslog_disabled(self, idrac_connection_setup_syslog_mock, idrac_default_args,
+ idrac_file_manager_mock, mocker):
+ idrac_default_args.update(
+ {"share_name": "dummy_share_name", "share_mnt": "mountname", "share_user": "shareuser",
+ "syslog": "Disabled", "share_password": "sharepassword"})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
+ obj = MagicMock()
+ obj.IsValid = True
+
+ mocker.patch(
+ MODULE_PATH + "idrac_syslog.file_share_manager.create_share_obj", return_value=(obj))
+ message = {"changes_applicable": True, "message": "changes found to commit!", "changed": True,
+ "Status": "Success"}
+ idrac_connection_setup_syslog_mock.config_mgr.is_change_applicable.return_value = message
+ idrac_connection_setup_syslog_mock.config_mgr.disable_syslog.return_value = message
+ msg = self.module.run_setup_idrac_syslog(
+ idrac_connection_setup_syslog_mock, f_module)
+ assert msg == {'changes_applicable': True,
+ 'message': 'changes found to commit!', 'changed': True, 'Status': 'Success'}
+
+    def test_main_idrac_setup_syslog_attr_exception_handling_case(self, idrac_connection_setup_syslog_mock, idrac_default_args,
+ idrac_file_manager_mock, mocker):
+ idrac_default_args.update(
+ {"share_name": "dummy_share_name", "share_mnt": "mountname", "share_user": "shareuser",
+ "syslog": "Disabled", "share_password": "sharepassword"})
+ mocker.patch(
+ MODULE_PATH + 'idrac_syslog.run_setup_idrac_syslog',
+ side_effect=AttributeError('NoneType'))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['msg'] == "Unable to access the share. Ensure that the share name, share mount, and share credentials provided are correct."
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_system_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_system_info.py
index dbbb130e9..6913cb908 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_system_info.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_system_info.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -15,7 +15,7 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_system_info
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from mock import MagicMock, Mock
from pytest import importorskip
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -65,7 +65,7 @@ class TestSystemInventory(FakeAnsibleModule):
if exc_type not in [HTTPError, SSLValidationError]:
idrac_system_info_connection_mock.get_json_device.side_effect = exc_type('test')
else:
- idrac_system_info_connection_mock.get_json_device.side_effect = exc_type('http://testhost.com', 400,
+ idrac_system_info_connection_mock.get_json_device.side_effect = exc_type('https://testhost.com', 400,
'http error message',
{
"accept-type": "application/json"},
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_timezone_ntp.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_timezone_ntp.py
index ee1d9d2e8..7358efed2 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_timezone_ntp.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_timezone_ntp.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 6.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2020-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -14,8 +14,8 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_timezone_ntp
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock, PropertyMock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock, Mock
from io import StringIO
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
@@ -79,6 +79,29 @@ class TestConfigTimezone(FakeAnsibleModule):
result = self._run_module(idrac_default_args)
assert result["msg"] == "Successfully configured the iDRAC time settings."
+ status_msg = {"Status": "Failure", "Message": "No changes found to commit!",
+ "msg": {"Status": "Success", "Message": "No changes found to commit!"}}
+ mocker.patch(MODULE_PATH +
+ 'idrac_timezone_ntp.run_idrac_timezone_config', return_value=status_msg)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Successfully configured the iDRAC time settings."
+
+ status_msg = {"Status": "Success",
+ "msg": {"Status": "Success", "Message": "No changes found to commit!"}}
+ mocker.patch(MODULE_PATH +
+ 'idrac_timezone_ntp.run_idrac_timezone_config', return_value=status_msg)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Successfully configured the iDRAC time settings."
+ assert result["changed"] is True
+
+ status_msg = {"Status": "Success", "Message": "No changes found",
+ "msg": {"Status": "Success", "Message": "No changes found to commit!"}}
+ mocker.patch(MODULE_PATH +
+ 'idrac_timezone_ntp.run_idrac_timezone_config', return_value=status_msg)
+ result = self._run_module(idrac_default_args)
+ assert result["msg"] == "Successfully configured the iDRAC time settings."
+ assert result["changed"] is True
+
def test_run_idrac_timezone_config_success_case01(self, idrac_connection_configure_timezone_mock,
idrac_default_args, idrac_file_manager_config_timesone_mock):
idrac_default_args.update({"share_name": None, "share_mnt": None, "share_user": None,
@@ -218,7 +241,7 @@ class TestConfigTimezone(FakeAnsibleModule):
else:
mocker.patch(
MODULE_PATH + 'idrac_timezone_ntp.run_idrac_timezone_config',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
if not exc_type == URLError:
result = self._run_module_with_fail_json(idrac_default_args)
@@ -226,3 +249,27 @@ class TestConfigTimezone(FakeAnsibleModule):
else:
result = self._run_module(idrac_default_args)
assert 'msg' in result
+
+ def test_run_idrac_timezone_config(self, mocker, idrac_default_args,
+ idrac_connection_configure_timezone_mock,
+ idrac_file_manager_config_timesone_mock):
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ obj = MagicMock()
+ obj.IsValid = False
+ mocker.patch(
+ MODULE_PATH + "idrac_timezone_ntp.file_share_manager.create_share_obj", return_value=(obj))
+ with pytest.raises(Exception) as exc:
+ self.module.run_idrac_timezone_config(
+ idrac_connection_configure_timezone_mock, f_module)
+ assert exc.value.args[0] == "Unable to access the share. Ensure that the share name, share mount, and share credentials provided are correct."
+
+ def test_main_idrac_configure_timezone_attr_exception_handling_case(self, mocker, idrac_default_args,
+ idrac_connection_configure_timezone_mock,
+ idrac_file_manager_config_timesone_mock):
+ idrac_default_args.update({"share_name": None})
+ mocker.patch(
+ MODULE_PATH + 'idrac_timezone_ntp.run_idrac_timezone_config',
+ side_effect=AttributeError('NoneType'))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['msg'] == "Unable to access the share. Ensure that the share name, share mount, and share credentials provided are correct."
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py
index 2fa528d0d..0ef6e6da3 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2020-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -16,8 +16,8 @@ import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_user
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock
from ansible.module_utils._text import to_text
from io import StringIO
@@ -50,6 +50,17 @@ class TestIDRACUser(FakeAnsibleModule):
resp = self.module.get_payload(f_module, 1, action="update")
assert resp["Users.1.UserName"] == idrac_default_args["new_user_name"]
+ def test_get_payload_2(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "custom_privilege": 17, "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args)
+ resp = self.module.get_payload(f_module, 1)
+ assert resp["Users.1.Privilege"] == idrac_default_args["custom_privilege"]
+
def test_convert_payload_xml(self, idrac_connection_user_mock, idrac_default_args, mocker):
idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
"user_name": "test", "user_password": "password",
@@ -134,6 +145,7 @@ class TestIDRACUser(FakeAnsibleModule):
response = self.module.get_user_account(f_module, idrac_connection_user_mock)
assert response[0]["Users.2#UserName"] == "test_user"
assert response[3] == 3
+ assert response[4] == "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/3"
def test_get_user_account_2(self, idrac_connection_user_mock, idrac_default_args, mocker):
idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
@@ -145,11 +157,23 @@ class TestIDRACUser(FakeAnsibleModule):
mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.export_scp",
return_value=MagicMock())
mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.get_idrac_local_account_attr",
- return_value={"Users.2#UserName": "test_user", "Users.3#UserName": ""})
+ return_value={"Users.2#UserName": "test_user", "Users.3#UserName": "test"})
f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
response = self.module.get_user_account(f_module, idrac_connection_user_mock)
- assert response[3] == 3
- assert response[4] == "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/3"
+ assert response[2] == 3
+ assert response[1] == "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/3"
+
+ def test_get_user_account_invalid_name(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ with pytest.raises(Exception) as err:
+ self.module.get_user_account(f_module, idrac_connection_user_mock)
+ assert err.value.args[0] == "User name is not valid."
def test_create_or_modify_account_1(self, idrac_connection_user_mock, idrac_default_args, mocker):
idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
@@ -325,9 +349,52 @@ class TestIDRACUser(FakeAnsibleModule):
None, None, user_attr)
assert response[1] == "Successfully updated user account."
+ def test_create_or_modify_account_both_slot_empty_input(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idrac_connection_user_mock.get_server_generation = (14, "3.60.60.60")
+ mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
+ mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
+ return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.invoke_request",
+ return_value={"Message": "Successfully created a request."})
+ slot_id = 2
+ slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
+ user_attr = {"User.2#UserName": "test_user"}
+ response = self.module.create_or_modify_account(f_module, idrac_connection_user_mock, slot_id, slot_uri,
+ slot_id, slot_uri, user_attr)
+ assert response[1] == "Successfully updated user account."
+
+ def test_create_or_modify_account_both_slot_empty_none_input(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idrac_connection_user_mock.get_server_generation = (14, "3.60.60.60")
+ mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
+ mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
+ return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
+ mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.invoke_request",
+ return_value={"Message": "Successfully created a request."})
+        # slot_id and slot_uri are passed as None to trigger the maximum-users error.
+ user_attr = {"User.2#UserName": "test_user"}
+ with pytest.raises(Exception) as exc:
+ self.module.create_or_modify_account(f_module, idrac_connection_user_mock, None, None,
+ None, None, user_attr)
+ assert exc.value.args[0] == "Maximum number of users reached. Delete a user account and retry the operation."
+
@pytest.mark.parametrize("exc_type", [SSLValidationError, URLError, ValueError, TypeError,
ConnectionError, HTTPError, ImportError, RuntimeError])
- def test_main(self, exc_type, idrac_connection_user_mock, idrac_default_args, mocker):
+    def test_main_exceptions(self, exc_type, idrac_connection_user_mock, idrac_default_args, mocker):
idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
"user_name": "test", "user_password": "password",
"privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
@@ -340,11 +407,96 @@ class TestIDRACUser(FakeAnsibleModule):
side_effect=exc_type('test'))
else:
mocker.patch(MODULE_PATH + "idrac_user.create_or_modify_account",
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
- if not exc_type == URLError:
+ if exc_type != URLError:
result = self._run_module_with_fail_json(idrac_default_args)
assert result['failed'] is True
else:
result = self._run_module(idrac_default_args)
assert 'msg' in result
+
+ def test_main_error(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "absent", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
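+        # The module is expected to surface the error message embedded in the mocked response body.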
+ obj = MagicMock()
+ obj.json_data = {"error": {"message": "Some Error Occured"}}
+ mocker.patch(MODULE_PATH + "idrac_user.remove_user_account", return_value=(obj, "error"))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+        assert result['msg'] == "Some Error Occurred"
+
+ def test_main_error_oem(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "absent", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ obj = MagicMock()
+ obj.json_data = {"Oem": {"Dell": {"Message": "Unable to complete application of configuration profile values."}}}
+ mocker.patch(MODULE_PATH + "idrac_user.remove_user_account", return_value=(obj, "error"))
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ assert result['msg'] == "Unable to complete application of configuration profile values."
+
+ def test_main_create_oem(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ obj = MagicMock()
+ obj.json_data = {"Oem": {"Dell": {"Message": "This Message Does Not Exists"}}}
+ mocker.patch(MODULE_PATH + "idrac_user.create_or_modify_account", return_value=(obj, "created"))
+ result = self._run_module(idrac_default_args)
+ assert result['changed'] is True
+ assert result['msg'] == "created"
+
+ def test_main_state_some(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "some", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ assert result['msg'] == "value of state must be one of: present, absent, got: some"
+
+ def test_validate_input(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
+ "user_name": "test", "user_password": "password",
+ "custom_privilege": 512, "ipmi_lan_privilege": "Administrator",
+ "ipmi_serial_privilege": "Administrator", "enable": True,
+ "sol_enable": True, "protocol_enable": True,
+ "authentication_protocol": "SHA", "privacy_protocol": "AES"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ with pytest.raises(Exception) as err:
+ self.module.validate_input(f_module)
+ assert err.value.args[0] == "custom_privilege value should be from 0 to 511."
+
+ idrac_default_args.update({"state": "absent"})
+ ret = self.module.validate_input(f_module)
+ assert ret is None
+
+ def test_compare_payload(self, idrac_connection_user_mock, idrac_default_args, mocker):
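+        # A payload compared against missing or differing attributes requires a change; identical values do not.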
+ json_payload = {"Users.1#Password": "MyDummyPassword"}
+ is_change_required = self.module.compare_payload(json_payload, None)
+ assert is_change_required is True
+
+ json_payload = {"Users.1#Privilege": "123"}
+ idrac_attr = {"Users.1#Privilege": "123"}
+ is_change_required = self.module.compare_payload(json_payload, idrac_attr)
+ assert is_change_required is False
+
+ json_payload = {"Users.1#Privilege": "123"}
+ idrac_attr = {"Users.1#Privilege": "124"}
+ is_change_required = self.module.compare_payload(json_payload, idrac_attr)
+ assert is_change_required is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user_info.py
new file mode 100644
index 000000000..82121c2d9
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user_info.py
@@ -0,0 +1,231 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_user_info
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock
+from ansible.module_utils._text import to_text
+from io import StringIO
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+HTTPS_ADDRESS = 'https://testhost.com'
+
+
+class TestIDRACUserInfo(FakeAnsibleModule):
+ module = idrac_user_info
+
+ @pytest.fixture
+ def idrac_user_info_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
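+    # Patches iDRACRedfishAPI so the module's context manager yields the mocked connection.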
+ @pytest.fixture
+ def idrac_connection_user_info_mock(self, mocker, idrac_user_info_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'idrac_user_info.iDRACRedfishAPI',
+ return_value=idrac_user_info_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_user_info_mock
+ return idrac_conn_mock
+
+ def test_fetch_all_accounts_success_case(self, idrac_default_args, idrac_connection_user_info_mock,
+ idrac_user_info_mock, mocker):
+ obj = MagicMock()
+ obj.json_data = {"Members": [
+ {"UserName": "test", "Oem": {"Dell": "test"}}]}
+ mocker.patch(MODULE_PATH + "idrac_user_info.iDRACRedfishAPI.invoke_request",
+ return_value=(obj))
+ resp = self.module.fetch_all_accounts(idrac_connection_user_info_mock, "/acounts/accdetails")
+ assert resp[0].get("UserName") == "test"
+
+ def test_get_user_id_accounts(self, idrac_default_args, idrac_connection_user_info_mock,
+ idrac_user_info_mock, mocker):
+ json_str = to_text(json.dumps({"data": "out"}))
+ idrac_default_args.update({"username": "test"})
+ obj = MagicMock()
+ obj.json_data = {"UserName": "test"}
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ mocker.patch(MODULE_PATH + "idrac_user_info.iDRACRedfishAPI.invoke_request",
+ return_value=(obj))
+ mocker.patch(MODULE_PATH + "idrac_user_info.strip_substr_dict",
+ return_value=({"UserName": "test"}))
+ resp = self.module.get_user_id_accounts(
+ idrac_connection_user_info_mock, f_module, "/acounts/accdetails", 1)
+ assert resp.get("UserName") == "test"
+
+ obj = MagicMock()
+ obj.json_data = {"UserName": "test", "Oem": {"Dell": "test"}}
+ mocker.patch(MODULE_PATH + "idrac_user_info.iDRACRedfishAPI.invoke_request",
+ return_value=(obj))
+ mocker.patch(MODULE_PATH + "idrac_user_info.strip_substr_dict",
+ return_value=({"UserName": "test", "Oem": {"Dell": "test"}}))
+ resp = self.module.get_user_id_accounts(
+ idrac_connection_user_info_mock, f_module, "/acounts/accdetails", 1)
+ assert resp.get("UserName") == "test"
+
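+        # An HTTPError while fetching by id should surface the "'user_id' is not valid." message.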
+ idrac_connection_user_info_mock.invoke_request.side_effect = HTTPError(
+ HTTPS_ADDRESS, 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ with pytest.raises(Exception) as exc:
+ self.module.get_user_id_accounts(
+ idrac_connection_user_info_mock, f_module, "/acounts/accdetails", 1)
+ assert exc.value.args[0] == "'user_id' is not valid."
+
+ def test_get_user_name_accounts(self, idrac_default_args, idrac_connection_user_info_mock,
+ idrac_user_info_mock, mocker):
+ idrac_default_args.update({"username": "test"})
+ mocker.patch(MODULE_PATH + "idrac_user_info.fetch_all_accounts",
+ return_value=([{"UserName": "test"}]))
+ mocker.patch(MODULE_PATH + "idrac_user_info.strip_substr_dict",
+ return_value=({"UserName": "test"}))
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ resp = self.module.get_user_name_accounts(
+ idrac_connection_user_info_mock, f_module, "/acounts/accdetails", "test")
+ assert resp.get("UserName") == "test"
+
+ mocker.patch(MODULE_PATH + "idrac_user_info.strip_substr_dict",
+ return_value=({"UserName": "test", "Oem": {"Dell": "test"}}))
+ resp = self.module.get_user_name_accounts(
+ idrac_connection_user_info_mock, f_module, "/acounts/accdetails", "test")
+ assert resp.get("UserName") == "test"
+
+ with pytest.raises(Exception) as exc:
+ self.module.get_user_name_accounts(
+ idrac_connection_user_info_mock, f_module, "/acounts/accdetails", "test1")
+ assert exc.value.args[0] == "'username' is not valid."
+
+ def test_get_all_accounts_single(self, idrac_default_args, idrac_connection_user_info_mock,
+ idrac_user_info_mock, mocker):
+ idrac_default_args.update({"username": "test"})
+ mocker.patch(MODULE_PATH + "idrac_user_info.fetch_all_accounts",
+ return_value=([{"UserName": "test", "Oem": {"Dell": "test"}}]))
+ mocker.patch(MODULE_PATH + "idrac_user_info.strip_substr_dict",
+ return_value=({"UserName": "test", "Oem": {"Dell": "test"}}))
+ resp = self.module.get_all_accounts(
+ idrac_connection_user_info_mock, "/acounts/accdetails")
+ assert resp[0].get("UserName") == "test"
+
+ mocker.patch(MODULE_PATH + "idrac_user_info.fetch_all_accounts",
+ return_value=([{"UserName": ""}]))
+ resp = self.module.get_all_accounts(
+ idrac_connection_user_info_mock, "/acounts/accdetails")
+ assert resp == []
+
+ mocker.patch(MODULE_PATH + "idrac_user_info.fetch_all_accounts",
+ return_value=([]))
+ resp = self.module.get_all_accounts(
+ idrac_connection_user_info_mock, "/acounts/accdetails")
+ assert resp == []
+
+ def test_get_all_accounts_multiple(self, idrac_default_args, idrac_connection_user_info_mock,
+ idrac_user_info_mock, mocker):
+ def strip_substr_dict_mock(acc):
+ if acc.get("UserName") == "test":
+ return {"UserName": "test"}
+ else:
+ return {"UserName": "test1"}
+        mocker.patch(MODULE_PATH + "idrac_user_info.strip_substr_dict", side_effect=strip_substr_dict_mock)
+
+ mocker.patch(MODULE_PATH + "idrac_user_info.fetch_all_accounts",
+ return_value=([{"UserName": "test"}, {"UserName": "test1"}]))
+ resp = self.module.get_all_accounts(
+ idrac_connection_user_info_mock, "/acounts/accdetails")
+ assert resp[0].get("UserName") == "test"
+ assert resp[1].get("UserName") == "test1"
+
+ def test_get_accounts_uri(self, idrac_default_args, idrac_connection_user_info_mock,
+ idrac_user_info_mock, mocker):
+ acc_service_uri = MagicMock()
+ acc_service_uri.json_data = {"AccountService": {
+ "@odata.id": "/account"}, "Accounts": {"@odata.id": "/account/accountdetails"}}
+ acc_service = MagicMock()
+ acc_service.json_data = {"Accounts": {
+ "@odata.id": "/account/accountdetails"}}
+
+ mocker.patch(MODULE_PATH + "idrac_user_info.iDRACRedfishAPI.invoke_request",
+ return_value=(acc_service_uri))
+ resp = self.module.get_accounts_uri(idrac_connection_user_info_mock)
+ assert resp == "/account/accountdetails"
+
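+        # On HTTPError, the lookup should fall back to the default Redfish accounts URI.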
+ json_str = to_text(json.dumps({"data": "out"}))
+ idrac_connection_user_info_mock.invoke_request.side_effect = HTTPError(
+ HTTPS_ADDRESS, 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+
+ resp = self.module.get_accounts_uri(idrac_connection_user_info_mock)
+ assert resp == "/redfish/v1/AccountService/Accounts"
+
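+    # Exercises the main flow for a username lookup, a user_id lookup, a full listing, and the empty-result failure.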
+ def test_user_info_main_success_case_all(self, idrac_default_args, idrac_connection_user_info_mock,
+ idrac_user_info_mock, mocker):
+ idrac_default_args.update({"username": "test"})
+ mocker.patch(MODULE_PATH + "idrac_user_info.get_accounts_uri",
+ return_value=("/acounts/accdetails"))
+ mocker.patch(MODULE_PATH + "idrac_user_info.get_user_name_accounts",
+ return_value=({"UserName": "test"}))
+ idrac_user_info_mock.status_code = 200
+ idrac_user_info_mock.success = True
+ resp = self._run_module(idrac_default_args)
+ assert resp['msg'] == "Successfully retrieved the user information."
+ assert resp['user_info'][0].get("UserName") == "test"
+
+ mocker.patch(MODULE_PATH + "idrac_user_info.get_user_id_accounts",
+ return_value=({"UserName": "test"}))
+ idrac_default_args.update({"user_id": "1234"})
+ idrac_default_args.pop("username")
+ resp = self._run_module(idrac_default_args)
+ assert resp['msg'] == "Successfully retrieved the user information."
+ assert resp['user_info'][0].get("UserName") == "test"
+
+ mocker.patch(MODULE_PATH + "idrac_user_info.get_all_accounts",
+ return_value=([{"UserName": "test"}]))
+ idrac_default_args.pop("user_id")
+ resp = self._run_module(idrac_default_args)
+ assert resp['msg'] == "Successfully retrieved the information of 1 user(s)."
+ assert resp['user_info'][0].get("UserName") == "test"
+
+ mocker.patch(MODULE_PATH + "idrac_user_info.get_all_accounts",
+ return_value=([]))
+ resp = self._run_module_with_fail_json(idrac_default_args)
+ assert resp['failed'] is True
+ assert resp['msg'] == "Unable to retrieve the user information."
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_idrac_user_info_main_exception_handling_case(self, exc_type, mocker, idrac_default_args,
+ idrac_connection_user_info_mock, idrac_user_info_mock):
+ idrac_user_info_mock.status_code = 400
+ idrac_user_info_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + "idrac_user_info.get_accounts_uri",
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH + "idrac_user_info.get_accounts_uri",
+ side_effect=exc_type(HTTPS_ADDRESS, 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ if exc_type != URLError:
+ result = self._run_module_with_fail_json(idrac_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_virtual_media.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_virtual_media.py
index 94e620f3e..5c7c32b44 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_virtual_media.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_virtual_media.py
@@ -15,15 +15,15 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_virtual_media
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock
-from mock import PropertyMock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from io import StringIO
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+ISO_PATH = "//XX.XX.XX.XX/path/file.iso"
+ISO_IMAGE_PATH = "//XX.XX.XX.XX/path/image_file.iso"
@pytest.fixture
@@ -40,17 +40,17 @@ class TestVirtualMedia(FakeAnsibleModule):
def test_validate_params(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args):
idrac_default_args.update(
- {"virtual_media": [{"index": 1, "insert": True, "image": "//192.168.0.1/path/image.iso"}]})
+ {"virtual_media": [{"index": 1, "insert": True, "image": "//XX.XX.XX.XX/path/image.iso"}]})
f_module = self.get_module_mock(params=idrac_default_args)
with pytest.raises(Exception) as err:
self.module._validate_params(f_module, {"index": 1, "insert": True,
- "image": "//192.168.0.1/path/image.iso"}, "140")
+ "image": "//XX.XX.XX.XX/path/image.iso"}, "140")
assert err.value.args[0] == "CIFS share required username and password."
idrac_default_args.update({"virtual_media": [{"index": 1, "insert": True, "username": "user", "password": "pwd",
- "image": "\\\\192.168.0.1\\path\\image.iso"}]})
+ "image": "\\\\XX.XX.XX.XX\\path\\image.iso"}]})
f_module = self.get_module_mock(params=idrac_default_args)
result = self.module._validate_params(f_module, {"password": "pwd", "insert": True, "username": "usr",
- "image": "\\\\192.168.0.1\\path\\image.iso", "index": 1},
+ "image": "\\\\XX.XX.XX.XX\\path\\image.iso", "index": 1},
"141")
assert result is None
@@ -59,7 +59,7 @@ class TestVirtualMedia(FakeAnsibleModule):
"RedfishVersion": "1.13.1",
"VirtualMedia": {"@odata.id": "/redfish/v1/Systems/System.Embedded.1/VirtualMedia"},
"Members": [{"Inserted": False, "Image": None},
- {"Inserted": True, "Image": "//192.168.0.1/file_path/file.iso"}]
+ {"Inserted": True, "Image": "//XX.XX.XX.XX/file_path/file.iso"}]
}
resp, vr_id, rd_version = self.module.get_virtual_media_info(virtual_media_conn_mock)
assert vr_id == "system"
@@ -68,17 +68,17 @@ class TestVirtualMedia(FakeAnsibleModule):
assert vr_id == "manager"
def test_get_payload_data(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args):
- idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso"}]})
- each = {"insert": True, "image": "//192.168.0.1/path/file.iso", "index": 1, "media_type": "CD"}
- vr_member = [{"Inserted": True, "Image": "//192.168.0.1/path/image_file.iso",
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": ISO_PATH}]})
+ each = {"insert": True, "image": ISO_PATH, "index": 1, "media_type": "CD"}
+ vr_member = [{"Inserted": True, "Image": ISO_IMAGE_PATH,
"UserName": "username", "Password": "password", "Id": "CD", "MediaTypes": ["CD", "DVD"]}]
is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "manager")
assert is_change is True
- assert input_vr_mem == {'Inserted': True, 'Image': '//192.168.0.1/path/file.iso'}
- assert vr_mem == {'Inserted': True, 'Image': '//192.168.0.1/path/image_file.iso', 'UserName': 'username',
+ assert input_vr_mem == {'Inserted': True, 'Image': '//XX.XX.XX.XX/path/file.iso'}
+ assert vr_mem == {'Inserted': True, 'Image': '//XX.XX.XX.XX/path/image_file.iso', 'UserName': 'username',
'Password': 'password', 'Id': 'CD', 'MediaTypes': ['CD', 'DVD']}
each.update({"username": "user_name", "password": "password", "domain": "domain",
- "image": "192.168.0.3:/file_path/image.iso"})
+ "image": "XX.XX.XX.XX:/file_path/image.iso"})
is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "manager")
assert is_change is True
each.update({"media_type": "USBStick"})
@@ -90,25 +90,25 @@ class TestVirtualMedia(FakeAnsibleModule):
is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "system")
assert is_change is True
each.update({"username": "user_name", "password": "password", "domain": "domain", "media_type": "CD",
- "image": "192.168.0.3:/file_path/image.img", "insert": True})
+ "image": "XX.XX.XX.XX:/file_path/image.img", "insert": True})
is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "manager")
assert unsup_media == 1
each.update({"username": "user_name", "password": "password", "domain": "domain", "media_type": "DVD",
- "image": "192.168.0.3:/file_path/image.img", "insert": True})
+ "image": "XX.XX.XX.XX:/file_path/image.img", "insert": True})
is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "manager")
assert unsup_media == 1
def test_domain_name(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args):
- idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso"}]})
- each = {"insert": True, "image": "//192.168.0.1/path/file.iso", "index": 1, "media_type": "CD",
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": ISO_PATH}]})
+ each = {"insert": True, "image": ISO_PATH, "index": 1, "media_type": "CD",
"domain": "domain", "username": "user", "password": "pwd"}
- vr_member = [{"Inserted": True, "Image": "//192.168.0.1/path/image_file.iso", "domain": "domain",
+ vr_member = [{"Inserted": True, "Image": ISO_IMAGE_PATH, "domain": "domain",
"UserName": "username", "Password": "password", "Id": "CD", "MediaTypes": ["CD", "DVD"]}]
is_change, input_vr_mem, vr_mem, unsup_media = self.module.get_payload_data(each, vr_member, "manager")
assert is_change is True
def test_virtual_media_operation(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args, mocker):
- idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso"}],
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": ISO_PATH}],
"force": True})
f_module = self.get_module_mock(params=idrac_default_args)
mocker.patch(MODULE_PATH + 'idrac_virtual_media.time.sleep', return_value=None)
@@ -119,8 +119,8 @@ class TestVirtualMedia(FakeAnsibleModule):
"#VirtualMedia.InsertMedia": {
"target": "/redfish/v1/Systems/System.Embedded.1/VirtualMedia/1/Actions/VirtualMedia.InsertMedia"}
}},
- "payload": {"Inserted": True, "Image": "http://192.168.0.1/file_path/file.iso"},
- "input": {"index": 1, "insert": True, "image": "//192.168.0.1/path/file.iso", "force": True}
+ "payload": {"Inserted": True, "Image": "https://XX.XX.XX.XX/file_path/file.iso"},
+ "input": {"index": 1, "insert": True, "image": ISO_PATH, "force": True}
}]
result = self.module.virtual_media_operation(virtual_media_conn_mock, f_module, payload, "manager")
assert result == []
@@ -138,7 +138,7 @@ class TestVirtualMedia(FakeAnsibleModule):
@pytest.mark.parametrize("exc_type", [HTTPError])
def test_virtual_media_operation_http(self, virtual_media_conn_mock, redfish_response_mock,
idrac_default_args, mocker, exc_type):
- idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso"}],
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": ISO_PATH}],
"force": True})
f_module = self.get_module_mock(params=idrac_default_args)
mocker.patch(MODULE_PATH + 'idrac_virtual_media.time.sleep', return_value=None)
@@ -149,8 +149,8 @@ class TestVirtualMedia(FakeAnsibleModule):
"#VirtualMedia.InsertMedia": {
"target": "/redfish/v1/Systems/System.Embedded.1/VirtualMedia/CD/Actions/VirtualMedia.InsertMedia"}
}},
- "payload": {"Inserted": True, "Image": "http://192.168.0.1/file_path/file.iso"},
- "input": {"index": 1, "insert": True, "image": "//192.168.0.1/path/file.iso", "force": True}
+ "payload": {"Inserted": True, "Image": "https://XX.XX.XX.XX/file_path/file.iso"},
+ "input": {"index": 1, "insert": True, "image": ISO_PATH, "force": True}
}]
if exc_type == HTTPError:
mocker.patch(MODULE_PATH + 'idrac_virtual_media.json.load', return_value={
@@ -159,25 +159,25 @@ class TestVirtualMedia(FakeAnsibleModule):
json_str = to_text(json.dumps({"data": "out"}))
mocker.patch(
MODULE_PATH + 'idrac_virtual_media.time.sleep',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self.module.virtual_media_operation(virtual_media_conn_mock, f_module, payload, "system")
assert result == [{'@Message.ExtendedInfo': [{'MessageId': 'VRM0012'}]}]
def test_virtual_media(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args, mocker):
- vr_member = [{"Inserted": True, "Image": "//192.168.0.1/path/image_file.iso",
+ vr_member = [{"Inserted": True, "Image": ISO_IMAGE_PATH,
"UserName": "username", "Password": "password", "Id": "CD", "MediaTypes": ["CD", "DVD"]}]
mocker.patch(MODULE_PATH + 'idrac_virtual_media.virtual_media_operation', return_value=[])
mocker.patch(MODULE_PATH + 'idrac_virtual_media._validate_params', return_value=None)
mocker.patch(MODULE_PATH + 'idrac_virtual_media.get_payload_data', return_value=(True, {}, {}, 1))
- idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso"}],
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": ISO_PATH}],
"force": True})
f_module = self.get_module_mock(params=idrac_default_args)
with pytest.raises(Exception) as ex:
self.module.virtual_media(virtual_media_conn_mock, f_module, vr_member, "manager", "141")
assert ex.value.args[0] == "Unable to complete the virtual media operation because unsupported " \
"media type provided for index 1"
- idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.img"}],
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//XX.XX.XX.XX/path/file.img"}],
"force": True})
f_module = self.get_module_mock(params=idrac_default_args)
with pytest.raises(Exception) as ex:
@@ -188,7 +188,7 @@ class TestVirtualMedia(FakeAnsibleModule):
self.module.virtual_media(virtual_media_conn_mock, f_module, vr_member, "system", "141")
assert ex.value.args[0] == "Unable to complete the virtual media operation because " \
"unsupported media type provided for index 1"
- idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso",
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": ISO_PATH,
"index": 1, "media_type": "CD"}], "force": True})
f_module = self.get_module_mock(params=idrac_default_args)
mocker.patch(MODULE_PATH + 'idrac_virtual_media.get_payload_data', return_value=(True, {}, {}, None))
@@ -202,7 +202,7 @@ class TestVirtualMedia(FakeAnsibleModule):
with pytest.raises(Exception) as ex:
self.module.virtual_media(virtual_media_conn_mock, f_module, vr_member, "manager", "141")
assert ex.value.args[0] == "Changes found to be applied."
- idrac_default_args.update({"virtual_media": [{"insert": True, "image": "//192.168.0.1/path/file.iso",
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": ISO_PATH,
"index": 1, "media_type": "CD"}], "force": False})
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = True
@@ -213,8 +213,8 @@ class TestVirtualMedia(FakeAnsibleModule):
def test_main_success(self, virtual_media_conn_mock, redfish_response_mock, idrac_default_args, mocker):
idrac_default_args.update({"virtual_media": [
- {"insert": True, "image": "http://192.168.0.1/path/file.iso"},
- {"insert": True, "image": "192.168.0.2:/file/file.iso"}], "force": True})
+ {"insert": True, "image": "https://XX.XX.XX.XX/path/file.iso"},
+ {"insert": True, "image": "YY.YY.YY.YY:/file/file.iso"}], "force": True})
mocker.patch(MODULE_PATH + 'idrac_virtual_media.get_virtual_media_info',
return_value=([{"Insert": True}, {"Insert": True}], "manager", "141"))
with pytest.raises(Exception) as ex:
@@ -222,7 +222,7 @@ class TestVirtualMedia(FakeAnsibleModule):
assert ex.value.args[0]["msg"] == "Unable to complete the operation because the virtual media settings " \
"provided exceeded the maximum limit."
mocker.patch(MODULE_PATH + 'idrac_virtual_media.virtual_media', return_value=[])
- idrac_default_args.update({"virtual_media": [{"insert": True, "image": "http://192.168.0.1/path/file.iso"}],
+ idrac_default_args.update({"virtual_media": [{"insert": True, "image": "https://XX.XX.XX.XX/path/file.iso"}],
"force": True})
result = self._run_module(idrac_default_args)
assert result == {'changed': True, 'msg': 'Successfully performed the virtual media operation.'}
@@ -241,7 +241,7 @@ class TestVirtualMedia(FakeAnsibleModule):
else:
mocker.patch(
MODULE_PATH + 'idrac_virtual_media.get_virtual_media_info',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
if not exc_type == URLError:
result = self._run_module_with_fail_json(idrac_default_args)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_active_directory.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_active_directory.py
index 1722a3daa..5f141775a 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_active_directory.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_active_directory.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 4.0.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -97,14 +97,14 @@ class TestOmeAD(FakeAnsibleModule):
@pytest.mark.parametrize("params", [{
"module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
"group_domain": "domain.com", "name": "domdev"},
- "get_ad": ({"Name": "ad_test", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["192.168.20.181"],
+ "get_ad": ({"Name": "ad_test", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["XX.XX.XX.XX"],
"DnsServer": [], "GroupDomain": "dellemcdomain.com", "NetworkTimeOut": 120, "SearchTimeOut": 120,
"ServerPort": 3269, "CertificateValidation": False}, 1),
"msg": MODIFY_SUCCESS}, {
"module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
"group_domain": "domain.com", "name": "domdev", "test_connection": True,
"domain_username": "user", "domain_password": "passwd"}, "get_ad":
- ({"Name": "ad_test", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["192.168.20.181"], "DnsServer": [],
+ ({"Name": "ad_test", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["XX.XX.XX.XX"], "DnsServer": [],
"GroupDomain": "dellemcdomain.com", "NetworkTimeOut": 120, "SearchTimeOut": 120, "ServerPort": 3269,
"CertificateValidation": False}, 1),
"msg": "{0}{1}".format(TEST_CONNECTION_SUCCESS, MODIFY_SUCCESS)},
@@ -116,7 +116,7 @@ class TestOmeAD(FakeAnsibleModule):
"msg": NO_CHANGES_MSG}, {
"module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
"group_domain": "dellemcdomain.com", "name": "domdev"},
- "get_ad": ({"Name": "domdev", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["192.168.20.181"],
+ "get_ad": ({"Name": "domdev", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["XX.XX.XX.XX"],
"DnsServer": [], "GroupDomain": "dellemcdomain.com", "NetworkTimeOut": 120,
"SearchTimeOut": 120, "ServerPort": 3269, "CertificateValidation": False}, 1),
"msg": CHANGES_FOUND, "check_mode": True}
@@ -134,7 +134,7 @@ class TestOmeAD(FakeAnsibleModule):
@pytest.mark.parametrize("params", [{
"module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
"group_domain": "domain.com", "name": "domdev", "state": "absent"},
- "get_ad": ({"Name": "domdev", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["192.168.20.181"],
+ "get_ad": ({"Name": "domdev", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["XX.XX.XX.XX"],
"DnsServer": [], "GroupDomain": "dellemcdomain.com", "NetworkTimeOut": 120, "SearchTimeOut": 120,
"ServerPort": 3269, "CertificateValidation": False}, 1),
"msg": DELETE_SUCCESS},
@@ -143,7 +143,7 @@ class TestOmeAD(FakeAnsibleModule):
"msg": NO_CHANGES_MSG}, {
"module_args": {"domain_controller_lookup": "MANUAL", "domain_server": ["192.96.20.181"],
"group_domain": "dellemcdomain.com", "name": "domdev", "state": "absent"},
- "get_ad": ({"Name": "domdev", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["192.168.20.181"],
+ "get_ad": ({"Name": "domdev", "Id": 21789, "ServerType": "MANUAL", "ServerName": ["XX.XX.XX.XX"],
"DnsServer": [], "GroupDomain": "dellemcdomain.com", "NetworkTimeOut": 120,
"SearchTimeOut": 120, "ServerPort": 3269, "CertificateValidation": False}, 1),
"msg": CHANGES_FOUND, "check_mode": True}
@@ -215,7 +215,7 @@ class TestOmeAD(FakeAnsibleModule):
ome_connection_mock_obj = rest_obj_class_mock.return_value.__enter__.return_value
if params.get("is_http"):
json_str = to_text(json.dumps(params['error_info']))
- ome_connection_mock_obj.invoke_request.side_effect = HTTPError('http://testdellemcomead.com', 404,
+ ome_connection_mock_obj.invoke_request.side_effect = HTTPError('https://testdellemcomead.com', 404,
'http error message',
{"accept-type": "application/json"},
StringIO(json_str))
@@ -242,7 +242,7 @@ class TestOmeAD(FakeAnsibleModule):
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
else:
- mocker.patch(MODULE_PATH + 'get_ad', side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ mocker.patch(MODULE_PATH + 'get_ad', side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"},
StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies.py
new file mode 100644
index 000000000..1bf0c2e7c
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies.py
@@ -0,0 +1,1578 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 8.3.0
+# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+import os
+import tempfile
+from datetime import datetime, timedelta
+from io import StringIO
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_alert_policies
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_alert_policies.'
+
+SUCCESS_MSG = "Successfully {0}d the alert policy."
+NO_CHANGES_MSG = "No changes found to be applied."
+CHANGES_MSG = "Changes found to be applied."
+INVALID_TIME = "The specified {0} date or {0} time `{1}` to schedule the policy is not valid. Enter a valid date and time."
+END_START_TIME = "The end time `{0}` to schedule the policy must be greater than the start time `{1}`."
+CATEGORY_FETCH_FAILED = "Unable to retrieve the category details from OpenManage Enterprise."
+INVALID_TARGETS = "Specify target devices to apply the alert policy."
+INVALID_CATEGORY_MESSAGE = "Specify categories or message to create the alert policy."
+INVALID_SCHEDULE = "Specify a date and time to schedule the alert policy."
+INVALID_ACTIONS = "Specify alert actions for the alert policy."
+INVALID_SEVERITY = "Specify the severity to create the alert policy."
+MULTIPLE_POLICIES = "Unable to update the alert policies because the number of alert policies entered are more than " \
+ "one. The update policy operation supports only one alert policy at a time."
+DISABLED_ACTION = "Action {0} is disabled. Enable it before applying to the alert policy."
+ACTION_INVALID_PARAM = "The Action {0} attribute contains invalid parameter name {1}. The valid values are {2}."
+ACTION_INVALID_VALUE = "The Action {0} attribute contains invalid value for {1} for parameter name {2}. The valid " \
+ "values are {3}."
+ACTION_DIS_EXIST = "Action {0} does not exist."
+SUBCAT_IN_CATEGORY = "The subcategory {0} does not exist in the category {1}."
+CATEGORY_IN_CATALOG = "The category {0} does not exist in the catalog {1}."
+OME_DATA_MSG = "The {0} with the following {1} do not exist: {2}."
+CATALOG_DIS_EXIST = "The catalog {0} does not exist."
+CSV_PATH = "The message file {0} does not exist."
+DEFAULT_POLICY_DELETE = "The following default policies cannot be deleted: {0}."
+POLICY_ENABLE_MISSING = "Unable to {0} the alert policies {1} because the policy names are invalid. Enter the valid " \
+ "alert policy names and retry the operation."
+NO_POLICY_EXIST = "The alert policy does not exist."
+SEPARATOR = ", "
+
+
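+# Fixture that patches RestOME in the module under test so invoke_request
+# returns the shared ome_response_mock instead of reaching a live appliance.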
+@pytest.fixture
+def ome_connection_mock_for_alert_policies(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeAlertPolicies(FakeAnsibleModule):
+ module = ome_alert_policies
+
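+    # Each params dict supplies "json_data" (the mocked OME response), "mparams"
+    # (module arguments) and "message" (the expected result message); an optional
+    # "check_mode" key exercises the check-mode branch.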
+ @pytest.mark.parametrize("params", [
+ {"message": SUCCESS_MSG.format("enable"), "success": True,
+ "json_data": {"value": [{'Name': "new alert policy", "Id": 12, "Enabled": False}]},
+ "mparams": {"name": "new alert policy", "enable": True}},
+ {"message": CHANGES_MSG, "success": True, "check_mode": True,
+ "json_data": {"value": [{'Name': "new alert policy", "Id": 12, "Enabled": False}]},
+ "mparams": {"name": "new alert policy", "enable": True}},
+ {"message": MULTIPLE_POLICIES, "success": True,
+ "json_data": {"value": [{'Name': "alert policy1", "Id": 12, "Enabled": True},
+ {'Name': "alert policy2", "Id": 13, "Enabled": True}]},
+ "mparams": {"name": ["alert policy1", "alert policy2"], "enable": False, "description": 'Update case failed'}},
+ {"message": POLICY_ENABLE_MISSING.format("disable", "alert policy3"), "success": True,
+ "json_data": {"value": [{'Name': "alert policy1", "Id": 12, "Enabled": True},
+ {'Name': "alert policy2", "Id": 13, "Enabled": True}]},
+ "mparams": {"name": ["alert policy3", "alert policy2"], "enable": False}},
+ {"message": NO_CHANGES_MSG, "success": True, "check_mode": True,
+ "json_data": {"value": [{'Name': "new alert policy", "Id": 12, "Enabled": False}]},
+ "mparams": {"name": "new alert policy", "enable": False}},
+ {"message": SUCCESS_MSG.format("delete"), "success": True,
+ "json_data": {"report_list": [{'Name': "new alert policy", "Id": 12, "DefaultPolicy": False}],
+ "value": [{'Name': "new alert policy", "Id": 12, "DefaultPolicy": False}]},
+ "mparams": {"name": "new alert policy", "state": "absent"}},
+ {"message": CHANGES_MSG, "success": True, "check_mode": True,
+ "json_data": {"report_list": [{'Name': "new alert policy", "Id": 12, "DefaultPolicy": False}],
+ "value": [{'Name': "new alert policy", "Id": 12, "DefaultPolicy": False}]},
+ "mparams": {"name": "new alert policy", "state": "absent"}},
+ {"message": DEFAULT_POLICY_DELETE.format("new alert policy"), "success": True,
+ "json_data": {"report_list": [{'Name': "new alert policy", "Id": 12, "DefaultPolicy": False}],
+ "value": [{'Name': "new alert policy", "Id": 12, "DefaultPolicy": True}]},
+ "mparams": {"name": "new alert policy", "state": "absent"}},
+ {"message": NO_POLICY_EXIST, "success": True, "check_mode": True,
+ "json_data": {"report_list": [{'Name': "new alert policy", "Id": 12, "DefaultPolicy": False}],
+ "value": [{'Name': "new alert policy 1", "Id": 12, "DefaultPolicy": False}]},
+ "mparams": {"name": "new alert policy", "state": "absent"}},
+ {"message": NO_POLICY_EXIST, "success": True,
+ "json_data": {"report_list": [{'Name': "new alert policy", "Id": 12, "DefaultPolicy": False}],
+ "value": [{'Name': "new alert policy 1", "Id": 12, "DefaultPolicy": False}]},
+ "mparams": {"name": "new alert policy", "state": "absent"}},
+ ])
+ def test_ome_alert_policies_enable_delete(self, params, ome_connection_mock_for_alert_policies,
+ ome_response_mock, ome_default_args, module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_connection_mock_for_alert_policies.get_all_items_with_pagination.return_value = params[
+ 'json_data']
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(
+ ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['message']
+
+ trap_ip1 = "traphost1:162"
+ trap_ip2 = "traphost2:162"
+ trap_ip3 = "traphost3:514"
+ actions = [
+ {
+ "action_name": "Trap",
+ "parameters": [
+ {
+ "name": trap_ip2,
+ "value": "True"
+ }
+ ]
+ },
+ {
+ "action_name": "Mobile",
+ "parameters": []
+ },
+ {
+ "action_name": "Email",
+ "parameters": [
+ {
+ "name": "to",
+ "value": "email2@address.x"
+ },
+ {
+ "name": "from",
+ "value": "emailr@address.y"
+ },
+ {
+ "name": "subject",
+ "value": "test subject"
+ },
+ {
+ "name": "message",
+ "value": "test message"
+ }
+ ]
+ },
+ {
+ "action_name": "SMS",
+ "parameters": [
+ {
+ "name": "to",
+ "value": "1234567890"
+ }
+ ]
+ }
+ ]
+ create_input = {
+ "actions": actions,
+ "date_and_time": {
+ "date_from": (datetime.now() + timedelta(days=2)).strftime("%Y-%m-%d"),
+ "date_to": (datetime.now() + timedelta(days=3)).strftime("%Y-%m-%d"),
+ "days": [
+ "sunday",
+ "monday"
+ ],
+ "time_from": "11:00",
+ "time_to": "12:00",
+ "time_interval": True
+ },
+ "description": "Description of Alert Policy One",
+ "device_group": [
+ "AX",
+ "Linux Servers"
+ ],
+ "enable": True,
+ "message_ids": [
+ "AMP400",
+ "CTL201",
+ "AMP401"
+ ],
+ "name": [
+ "Alert Policy One"
+ ],
+ "severity": [
+ "unknown"
+ ],
+ "state": "present"
+ }
+ get_alert_policy = [{
+ "Id": 24792,
+ "Name": "Alert Policy One",
+ "Description": "CREATIOn of Alert Policy One",
+ "Enabled": True,
+ "DefaultPolicy": False,
+ "Editable": True,
+ "Visible": True,
+ "PolicyData": {
+ "Catalogs": [],
+ "Severities": [
+ 1
+ ],
+ "MessageIds": [
+ "'AMP401'",
+ "'AMP400'",
+ "'CTL201'"
+ ],
+ "Devices": [],
+ "DeviceTypes": [],
+ "Groups": [
+ 1011,
+ 1033
+ ],
+ "Schedule": {
+ "StartTime": "2023-10-09 00:00:00.000",
+ "EndTime": "2023-10-11 00:00:00.000",
+ "CronString": "* * * ? * mon,sun *",
+ "Interval": False
+ },
+ "Actions": [
+ {
+ "Id": 499,
+ "Name": "RemoteCommand",
+ "ParameterDetails": [
+ {
+ "Id": 0,
+ "Name": "remotecommandaction1",
+ "Value": "test",
+ "Type": "singleSelect",
+ "TypeParams": [
+ {
+ "Name": "option",
+ "Value": "test"
+ }
+ ]
+ }
+ ],
+ "TemplateId": 111
+ }
+ ],
+ "AllTargets": False,
+ "UndiscoveredTargets": []
+ },
+ "State": True,
+ "Owner": 10078
+ }]
+ get_all_actions = {
+ "Email": {
+ "Disabled": False,
+ "Id": 50,
+ "Parameters": {
+ "from": "admin@dell.com",
+ "message": "Event occurred for Device Name",
+ "subject": "Device Name: $name, Device IP Address: $ip, Severity: $severity",
+ "to": ""
+ },
+ "Type": {
+ "from": [],
+ "message": [],
+ "subject": [],
+ "to": []
+ }
+ },
+ "Ignore": {
+ "Disabled": False,
+ "Id": 100,
+ "Parameters": {},
+ "Type": {}
+ },
+ "Mobile": {
+ "Disabled": False,
+ "Id": 112,
+ "Parameters": {},
+ "Type": {}
+ },
+ "PowerControl": {
+ "Disabled": False,
+ "Id": 110,
+ "Parameters": {
+ "powercontrolaction": "poweroff"
+ },
+ "Type": {
+ "powercontrolaction": [
+ "powercycle",
+ "poweroff",
+ "poweron",
+ "gracefulshutdown"
+ ]
+ }
+ },
+ "RemoteCommand": {
+ "Disabled": False,
+ "Id": 111,
+ "Parameters": {
+ "remotecommandaction": "test"
+ },
+ "Type": {
+ "remotecommandaction": [
+ "test",
+ "cmd2 : XX.XX.XX.XX"
+ ]
+ }
+ },
+ "SMS": {
+ "Disabled": False,
+ "Id": 70,
+ "Parameters": {
+ "to": ""
+ },
+ "Type": {
+ "to": []
+ }
+ },
+ "Syslog": {
+ "Disabled": False,
+ "Id": 90,
+ "Parameters": {
+ trap_ip3: "true"
+ },
+ "Type": {
+ trap_ip3: [
+ "true",
+ "false"
+ ]
+ }
+ },
+ "Trap": {
+ "Disabled": False,
+ "Id": 60,
+ "Parameters": {
+ trap_ip1: "true",
+ trap_ip2: "true"
+ },
+ "Type": {
+ trap_ip1: [
+ "true",
+ "false"
+ ],
+ trap_ip2: [
+ "true",
+ "false"
+ ]
+ }
+ }
+ }
+ get_category_data_tree = {
+ 'Application': {
+ 'Audit': {
+ 4: {
+ 'Devices': 90,
+ 'Generic': 10,
+ 'Power Configuration': 151,
+ 'Users': 35
+ }
+ },
+ 'Configuration': {
+ 5: {
+ 'Application': 85,
+ 'Device Warranty': 116,
+ 'Devices': 90,
+ 'Discovery': 36,
+ 'Generic': 10,
+ 'Users': 35
+ }
+ },
+ 'Miscellaneous': {
+ 7: {
+ 'Miscellaneous': 20
+ }
+ },
+ 'Storage': {
+ 2: {
+ 'Devices': 90
+ }
+ },
+ 'System Health': {
+ 1: {
+ 'Devices': 90,
+ 'Health Status of Managed device': 7400,
+ 'Job': 47,
+ 'Metrics': 118,
+ 'Power Configuration': 151
+ }
+ },
+ 'Updates': {
+ 3: {
+ 'Application': 85,
+ 'Firmware': 112
+ }
+ }
+ },
+ 'Dell Storage': {
+ 'Storage': {
+ 2: {
+ 'Other': 7700
+ }
+ },
+ 'System Health': {
+ 1: {
+ 'Other': 7700,
+ 'Storage': 18
+ }
+ }
+ },
+ 'Storage': {'Audit': {
+ 4: {
+ 'Interface': 101
+ }
+ }},
+ 'iDRAC': {
+ 'Audit': {
+ 4: {
+ 'Interface': 101
+ }
+ }
+ },
+ }
+
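+    # Cases below cover create/update flows and the validation error paths; the
+    # helpers named in the test's "mocks" list are patched only when a matching
+    # key is present in params, so the remaining helpers run unpatched.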
+ @pytest.mark.parametrize("params", [
+ {"message": SUCCESS_MSG.format("create"), "success": True,
+ "mparams": create_input,
+ "get_alert_policies": [],
+ "validate_ome_data": (["AMP400", "AMP401", "CTL201"],),
+ "get_severity_payload": {"Severities": ["unknown"]},
+ "get_all_actions": get_all_actions,
+ "json_data": {"value": [{'Name': "new alert policy 1", "Id": 12, "DefaultPolicy": False}]}},
+ {"message": CHANGES_MSG, "success": True,
+ "check_mode": True,
+ "mparams": create_input,
+ "get_alert_policies": [],
+ "validate_ome_data": (["AMP400", "AMP401", "CTL201"],),
+ "get_severity_payload": {"Severities": ["unknown"]},
+ "get_all_actions": get_all_actions,
+ "json_data": {"value": [{'Name': "new alert policy 1", "Id": 12, "DefaultPolicy": False}]}},
+ {"message": SUCCESS_MSG.format("update"), "success": True,
+ "mparams": create_input,
+ "get_alert_policies": get_alert_policy,
+ "validate_ome_data": (["AMP400", "AMP401", "CTL201"],),
+ "get_category_data_tree": get_category_data_tree,
+ "get_all_actions": get_all_actions,
+ "json_data": {
+ "value": [
+ {
+
+ "Id": 1,
+ "Name": "Unknown",
+ "Description": "Unknown"
+ },
+ {
+ "Id": 2,
+ "Name": "Info",
+ "Description": "Info"
+ },
+ {
+ "Id": 4,
+ "Name": "Normal",
+ "Description": "Normal"
+ },
+ {
+ "Id": 8,
+ "Name": "Warning",
+ "Description": "Warning"
+ },
+ {
+ "Id": 16,
+ "Name": "Critical",
+ "Description": "Critical"
+ }
+ ]
+ }},
+ {"message": SUCCESS_MSG.format("update"), "success": True,
+ "mparams": {
+ "actions": [
+ {
+ "action_name": "Ignore",
+ "parameters": []
+ }
+ ],
+ "description": "Description of Alert Policy One",
+ "specific_undiscovered_devices": [
+ "host1",
+ "192.1.2.3-192.1.2.10"
+ ],
+ "enable": True,
+ "category": [
+ {
+ "catalog_category": [
+ {
+ "category_name": "Audit",
+ "sub_category_names": [
+ "Users",
+ "Generic"
+ ]
+ }
+ ],
+ "catalog_name": "Application"
+ },
+ {
+ "catalog_category": [
+ {
+ "category_name": "Storage",
+ "sub_category_names": [
+ "Other"
+ ]
+ }
+ ],
+ "catalog_name": "Dell Storage"
+ },
+ {"catalog_name": "Storage"},
+ {
+ "catalog_category": [
+ {
+ "category_name": "Audit",
+ "sub_category_names": []
+ }
+ ],
+ "catalog_name": "iDRAC"
+ }
+ ],
+ "name": [
+ "Alert Policy One"
+ ],
+ "new_name": "Alert Policy Renamed",
+ "severity": [
+ "unknown"
+ ],
+ "state": "present"
+ },
+ "get_alert_policies": get_alert_policy,
+ "validate_ome_data": (["AMP400", "AMP401", "CTL201"],),
+ "get_category_data_tree": get_category_data_tree,
+ "get_all_actions": get_all_actions,
+ "json_data": {"value": []}
+ },
+ {"message": OME_DATA_MSG.format("groups", "Name", "Linux Servers"), "success": True,
+ "mparams": {
+ "device_group": [
+ "AX",
+ "Linux Servers"
+ ],
+ "state": "present",
+ "name": "Test alert policy"
+ },
+ "get_alert_policies": get_alert_policy,
+ "json_data": {
+ "@odata.count": 102,
+ "@odata.nextLink": "/AlertPolicies",
+ "value": [{"Name": "AX", "Id": 121},
+ {"Name": "Group2", "Id": 122}]}
+ },
+ {"message": OME_DATA_MSG.format("groups", "Name", "Linux Servers"), "success": True,
+ "mparams": {
+ "device_group": [
+ "AX",
+ "Linux Servers"
+ ],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "Coverage for filter block in validate_ome_data"
+ },
+ "get_alert_policies": [{
+ "Id": 1234,
+ "Name": "Alert Policy Two",
+ "Description": "Alert Policy Two described",
+ "Enabled": True,
+ "DefaultPolicy": False,
+ "Editable": True,
+ "Visible": True,
+ "PolicyData": {
+ "Catalogs": [],
+ "Severities": [
+ 16
+ ],
+ "MessageIds": [
+ "'AMP403'",
+ "'AMP400'",
+ "'BIOS108'"
+ ],
+ "Devices": [],
+ "DeviceTypes": [],
+ "Groups": [
+ 111,
+ 133
+ ],
+ "Schedule": {
+ "StartTime": "2023-11-09 00:00:00.000",
+ "EndTime": "2023-11-11 00:00:00.000",
+ "CronString": "* * * ? * mon,sun *",
+ "Interval": False
+ },
+ "Actions": [
+ {
+ "Id": 499,
+ "Name": "RemoteCommand",
+ "ParameterDetails": [
+ {
+ "Id": 0,
+ "Name": "remotecommandaction1",
+ "Value": "test",
+ "Type": "singleSelect",
+ "TypeParams": [
+ {
+ "Name": "option",
+ "Value": "test"
+ }
+ ]
+ }
+ ],
+ "TemplateId": 111
+ }
+ ],
+ "AllTargets": False,
+ "UndiscoveredTargets": []
+ },
+ "State": True,
+ "Owner": 10078
+ }],
+ "json_data": {
+ "@odata.count": 300,
+ "value": [{"Name": "AX", "Id": 121},
+ {"Name": "Group2", "Id": 122}]}
+ },
+ {"message": INVALID_CATEGORY_MESSAGE, "success": True,
+ "mparams": {
+ "device_service_tag": [
+ "ABC1234",
+ "SVCTAG1"
+ ],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "Coverage for filter block in validate_ome_data"
+ },
+ "get_alert_policies": [],
+ "json_data": {
+ "@odata.count": 300,
+ "value": [{"DeviceServiceTag": "ABC1234", "Id": 121, "Type": 1000},
+ {"DeviceServiceTag": "SVCTAG1", "Id": 122, "Type": 1000}]}
+ },
+ {"message": INVALID_CATEGORY_MESSAGE, "success": True,
+ "mparams": {
+ "all_devices": True,
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "all devices coverage"
+ },
+ "get_alert_policies": [],
+ "json_data": {
+ "@odata.count": 300,
+ "value": [{"DeviceServiceTag": "ABC1234", "Id": 121, "Type": 1000},
+ {"DeviceServiceTag": "SVCTAG1", "Id": 122, "Type": 1000}]}
+ },
+ {"message": INVALID_CATEGORY_MESSAGE, "success": True,
+ "mparams": {
+ "any_undiscovered_devices": True,
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "all devices coverage"
+ },
+ "get_alert_policies": [],
+ "json_data": {
+ "@odata.count": 300,
+ "value": [{"DeviceServiceTag": "ABC1234", "Id": 121, "Type": 1000},
+ {"DeviceServiceTag": "SVCTAG1", "Id": 122, "Type": 1000}]}
+ },
+ {"message": INVALID_CATEGORY_MESSAGE, "success": True,
+ "mparams": {
+ "specific_undiscovered_devices": [
+ "192.1.2.3-192.1.2.10",
+ "hostforpolicy.domain.com"
+ ],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "all devices coverage"
+ },
+ "get_alert_policies": [],
+ "json_data": {
+ "@odata.count": 300,
+ "value": [{"DeviceServiceTag": "ABC1234", "Id": 121, "Type": 1000},
+ {"DeviceServiceTag": "SVCTAG1", "Id": 122, "Type": 1000}]}
+ },
+ {"message": INVALID_SCHEDULE, "success": True,
+ "mparams": {
+ "all_devices": True,
+ "message_file": "{0}/{1}".format(tempfile.gettempdir(), "myfile.csv"),
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "all devices coverage"
+ },
+ "get_alert_policies": [],
+ "create_temp_file": "MessageIds\nMSGID1",
+ "json_data": {
+ "@odata.count": 300,
+ "value": [{"MessageId": "MSGID1", "Id": 121, "Type": 1000},
+ {"MessageId": "MSGID2", "Id": 122, "Type": 1000}]}
+ },
+ {"message": INVALID_SCHEDULE, "success": True,
+ "mparams": {
+ "all_devices": True,
+ "category": [
+ {
+ "catalog_category": [
+ {
+ "category_name": "Audit",
+ "sub_category_names": [
+ "Users",
+ "Generic"
+ ]
+ }
+ ],
+ "catalog_name": "Application"
+ },
+ {
+ "catalog_category": [
+ {
+ "category_name": "Storage",
+ "sub_category_names": [
+ "Other"
+ ]
+ }
+ ],
+ "catalog_name": "Dell Storage"
+ }
+ ],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "get_category_data_tree coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "json_data": {
+ "value": [
+ {
+ "Name": "Application",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "Id": 4,
+ "Name": "Audit",
+ "CatalogName": "Application",
+ "SubCategoryDetails": [
+ {
+ "Id": 90,
+ "Name": "Devices",
+ "Description": "Devices description"
+ },
+ {
+ "Id": 10,
+ "Name": "Generic",
+ "Description": "Generic description"
+ },
+ {
+ "Id": 151,
+ "Name": "Power Configuration",
+ "Description": "Power Configuration description"
+ },
+ {
+ "Id": 35,
+ "Name": "Users",
+ "Description": "Users description"
+ }
+ ]
+ },
+ {
+ "Id": 7,
+ "Name": "Miscellaneous",
+ "CatalogName": "Application",
+ "SubCategoryDetails": [
+ {
+ "Id": 20,
+ "Name": "Miscellaneous",
+ "Description": "Miscellaneous description"
+ }
+ ]
+ },
+ {
+ "Id": 2,
+ "Name": "Storage",
+ "CatalogName": "Application",
+ "SubCategoryDetails": [
+ {
+ "Id": 90,
+ "Name": "Devices",
+ "Description": "Devices description"
+ }
+ ]
+ },
+ {
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "Application",
+ "SubCategoryDetails": [
+ {
+ "Id": 90,
+ "Name": "Devices",
+ "Description": "Devices description"
+ },
+ {
+ "Id": 7400,
+ "Name": "Health Status of Managed device",
+ "Description": "Health Status of Managed device description"
+ },
+ {
+ "Id": 47,
+ "Name": "Job",
+ "Description": "Job description"
+ },
+ {
+ "Id": 118,
+ "Name": "Metrics",
+ "Description": "Metrics description"
+ },
+ {
+ "Id": 151,
+ "Name": "Power Configuration",
+ "Description": "Power Configuration description"
+ }
+ ]
+ },
+ {
+ "Id": 3,
+ "Name": "Updates",
+ "CatalogName": "Application",
+ "SubCategoryDetails": [
+ {
+ "Id": 85,
+ "Name": "Application",
+ "Description": "Application description"
+ },
+ {
+ "Id": 112,
+ "Name": "Firmware",
+ "Description": "Firmware description"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "Name": "Dell Storage",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "Id": 2,
+ "Name": "Storage",
+ "CatalogName": "Dell Storage",
+ "SubCategoryDetails": [
+ {
+ "Id": 7700,
+ "Name": "Other",
+ "Description": "Other description"
+ }
+ ]
+ },
+ {
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "Dell Storage",
+ "SubCategoryDetails": [
+ {
+ "Id": 7700,
+ "Name": "Other",
+ "Description": "Other description"
+ },
+ {
+ "Id": 18,
+ "Name": "Storage",
+ "Description": "Storage description"
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {"message": INVALID_SEVERITY, "success": True,
+ "mparams": {
+ "actions": actions,
+ "all_devices": True,
+ "message_ids": ["MSG01", "MSG02"],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "get_all_actions coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "get_category_or_message": {"MessageIds": ["MSG01", "MSG02"]},
+ "get_schedule_payload": {"StartTime": "", "EndTime": ""},
+ "get_severity_payload": {},
+ "json_data": {
+ "value": [
+ {
+ "Name": "Email",
+ "Description": "Email",
+ "Disabled": False,
+ "ParameterDetails": [
+ {
+ "Id": 1,
+ "Name": "subject",
+ "Value": "Device Name: $name, Device IP Address: $ip, Severity: $severity",
+ "Type": "string",
+ "TemplateParameterTypeDetails": [
+ {
+ "Name": "maxLength",
+ "Value": "255"
+ }
+ ]
+ },
+ {
+ "Id": 2,
+ "Name": "to",
+ "Value": "",
+ "Type": "string",
+ "TemplateParameterTypeDetails": [
+ {
+ "Name": "maxLength",
+ "Value": "255"
+ }
+ ]
+ },
+ {
+ "Id": 3,
+ "Name": "from",
+ "Value": "admin@dell.com",
+ "Type": "string",
+ "TemplateParameterTypeDetails": [
+ {
+ "Name": "maxLength",
+ "Value": "255"
+ }
+ ]
+ },
+ {
+ "Id": 4,
+ "Name": "message",
+ "Value": "Event occurred for Device Name",
+ "Type": "string",
+ "TemplateParameterTypeDetails": [
+ {
+ "Name": "maxLength",
+ "Value": "255"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertActionTemplate",
+ "@odata.id": "/api/AlertService/AlertActionTemplates(60)",
+ "Id": 60,
+ "Name": "Trap",
+ "Description": "Trap",
+ "Disabled": False,
+ "ParameterDetails": [
+ {
+ "Id": 1,
+ "Name": trap_ip1,
+ "Value": "true",
+ "Type": "boolean",
+ "TemplateParameterTypeDetails": []
+ },
+ {
+ "Id": 2,
+ "Name": trap_ip2,
+ "Value": "true",
+ "Type": "boolean",
+ "TemplateParameterTypeDetails": []
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertActionTemplate",
+ "@odata.id": "/api/AlertService/AlertActionTemplates(90)",
+ "Id": 90,
+ "Name": "Syslog",
+ "Description": "Syslog",
+ "Disabled": False,
+ "ParameterDetails": [
+ {
+ "Id": 1,
+ "Name": trap_ip3,
+ "Value": "true",
+ "Type": "boolean",
+ "TemplateParameterTypeDetails": []
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertActionTemplate",
+ "@odata.id": "/api/AlertService/AlertActionTemplates(100)",
+ "Id": 100,
+ "Name": "Ignore",
+ "Description": "Ignore",
+ "Disabled": False,
+ "ParameterDetails": []
+ },
+ {
+ "@odata.type": "#AlertService.AlertActionTemplate",
+ "@odata.id": "/api/AlertService/AlertActionTemplates(70)",
+ "Id": 70,
+ "Name": "SMS",
+ "Description": "SMS",
+ "Disabled": False,
+ "ParameterDetails": [
+ {
+ "Id": 1,
+ "Name": "to",
+ "Value": "",
+ "Type": "string",
+ "TemplateParameterTypeDetails": [
+ {
+ "Name": "maxLength",
+ "Value": "255"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertActionTemplate",
+ "@odata.id": "/api/AlertService/AlertActionTemplates(110)",
+ "Id": 110,
+ "Name": "PowerControl",
+ "Description": "Power Control Action Template",
+ "Disabled": False,
+ "ParameterDetails": [
+ {
+ "Id": 1,
+ "Name": "powercontrolaction",
+ "Value": "poweroff",
+ "Type": "singleSelect",
+ "TemplateParameterTypeDetails": [
+ {
+ "Name": "option",
+ "Value": "powercycle"
+ },
+ {
+ "Name": "option",
+ "Value": "poweroff"
+ },
+ {
+ "Name": "option",
+ "Value": "poweron"
+ },
+ {
+ "Name": "option",
+ "Value": "gracefulshutdown"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertActionTemplate",
+ "@odata.id": "/api/AlertService/AlertActionTemplates(111)",
+ "Id": 111,
+ "Name": "RemoteCommand",
+ "Description": "RemoteCommand",
+ "Disabled": False,
+ "ParameterDetails": [
+ {
+ "Id": 1,
+ "Name": "remotecommandaction",
+ "Value": "test",
+ "Type": "singleSelect",
+ "TemplateParameterTypeDetails": [
+ {
+ "Name": "option",
+ "Value": "test"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertActionTemplate",
+ "@odata.id": "/api/AlertService/AlertActionTemplates(112)",
+ "Id": 112,
+ "Name": "Mobile",
+ "Description": "Mobile",
+ "Disabled": False,
+ "ParameterDetails": []
+ }
+ ]
+ }
+ },
+ {"message": DISABLED_ACTION.format("SMS"), "success": True,
+ "mparams": {
+ "actions": [{
+ "action_name": "SMS",
+ "parameters": [
+ {
+ "name": "to",
+ "value": "1234567890"
+ }
+ ]
+ }],
+ "all_devices": True,
+ "message_ids": ["MSG01", "MSG02"],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "get_all_actions coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "get_category_or_message": {"MessageIds": ["MSG01", "MSG02"]},
+ "get_schedule_payload": {"StartTime": "", "EndTime": ""},
+ "get_severity_payload": {},
+ "json_data": {
+ "value": [
+ {
+ "Id": 70,
+ "Name": "SMS",
+ "Description": "SMS",
+ "Disabled": True,
+ "ParameterDetails": [
+ {
+ "Id": 1,
+ "Name": "to",
+ "Value": "",
+ "Type": "string",
+ "TemplateParameterTypeDetails": [
+ {
+ "Name": "maxLength",
+ "Value": "255"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "Id": 112,
+ "Name": "Mobile",
+ "Description": "Mobile",
+ "Disabled": False,
+ "ParameterDetails": []
+ }
+ ]
+ }
+ },
+ {"message": ACTION_INVALID_PARAM.format("Trap", "traphost2:162", "traphost1:162"), "success": True,
+ "mparams": {
+ "actions": [{
+ "action_name": "Trap",
+ "parameters": [
+ {
+ "name": trap_ip2,
+ "value": "True"
+ }
+ ]
+ }],
+ "all_devices": True,
+ "message_ids": ["MSG01", "MSG02"],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "get_all_actions coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "get_category_or_message": {"MessageIds": ["MSG01", "MSG02"]},
+ "get_schedule_payload": {"StartTime": "", "EndTime": ""},
+ "get_severity_payload": {},
+ "json_data": {
+ "value": [
+ {
+ "Id": 100,
+ "Name": "SMS",
+ "Description": "Ignore",
+ "Disabled": False,
+ "ParameterDetails": []
+ },
+ {
+ "Id": 60,
+ "Name": "Trap",
+ "Description": "Trap",
+ "Disabled": False,
+ "ParameterDetails": [
+ {
+ "Id": 1,
+ "Name": trap_ip1,
+ "Value": "true",
+ "Type": "boolean",
+ "TemplateParameterTypeDetails": []
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {"message": ACTION_INVALID_VALUE.format("Trap", "Truthy", "traphost1:162", "true, false"), "success": True,
+ "mparams": {
+ "actions": [{
+ "action_name": "Trap",
+ "parameters": [
+ {
+ "name": trap_ip1,
+ "value": "Truthy"
+ }
+ ]
+ }],
+ "all_devices": True,
+ "message_ids": ["AMP01", "CTL201"],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "actions invalid coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Devices": [123, 124]},
+ "get_category_or_message": {"MessageIds": ["AMP01", "CTL201"]},
+ "get_schedule_payload": {"StartTime": "2023-11-01 11:00:00.000", "EndTime": "2023-12-01 12:00:00.000"},
+ "get_severity_payload": {},
+ "json_data": {
+ "value": [
+ {
+ "Id": 60,
+ "Name": "Trap",
+ "Description": "Trap",
+ "Disabled": False,
+ "ParameterDetails": [
+ {
+ "Id": 1,
+ "Name": trap_ip1,
+ "Value": "true",
+ "Type": "boolean",
+ "TemplateParameterTypeDetails": []
+ }
+ ]
+ }]
+ }
+ },
+ {"message": ACTION_DIS_EXIST.format("SNMPTrap"), "success": True,
+ "mparams": {
+ "actions": [{
+ "action_name": "SNMPTrap",
+ "parameters": [
+ {
+ "name": trap_ip1,
+ "value": "true"
+ }
+ ]
+ }],
+ "all_devices": True,
+ "message_ids": ["BIOS101", "RND123"],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "No existing action coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "get_category_or_message": {"MessageIds": ["MSG23", "MSG46"]},
+ "get_schedule_payload": {"StartTime": "2023-11-01 11:00:00.000", "EndTime": "2023-12-01 12:00:00.000"},
+ "get_severity_payload": {},
+ "json_data": {
+ "value": [
+ {
+ "Id": 60,
+ "Name": "Trap",
+ "Description": "Trap",
+ "Disabled": False,
+ "ParameterDetails": [
+ {
+ "Id": 1,
+ "Name": trap_ip1,
+ "Value": "true",
+ "Type": "boolean",
+ "TemplateParameterTypeDetails": []
+ }
+ ]
+ }]
+ }
+ },
+ {"message": INVALID_TIME.format("from", "2023-20-01 11:00:00.000"), "success": True,
+ "mparams": {
+ "date_and_time": {
+ "date_from": "2023-20-01",
+ "date_to": "2023-10-02",
+ "days": [
+ "sunday",
+ "monday"
+ ],
+ "time_from": "11:00",
+ "time_to": "12:00",
+ "time_interval": True
+ },
+ "all_devices": True,
+ "message_ids": ["MSG01", "MSG02"],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "get_schedule coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "get_category_or_message": {"MessageIds": ["MSG01", "MSG02"]},
+ "json_data": {
+ "value": []
+ }
+ },
+ {"message": INVALID_TIME.format("from", "2023-10-01 31:00:00.000"), "success": True,
+ "mparams": {
+ "date_and_time": {
+ "date_from": "2023-10-01",
+ "date_to": "2023-10-02",
+ "days": [
+ "sunday",
+ "monday"
+ ],
+ "time_from": "31:00",
+ "time_to": "12:00",
+ "time_interval": True
+ },
+ "all_devices": True,
+ "message_ids": ["MSG01", "MSG02"],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "get_schedule coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "get_category_or_message": {"MessageIds": ["MSG01", "MSG02"]},
+ "json_data": {
+ "value": []
+ }
+ },
+ {"message": END_START_TIME.format("2023-10-01 12:00:00", "2023-10-02 11:00:00"), "success": True,
+ "mparams": {
+ "date_and_time": {
+ "date_from": "2023-10-02",
+ "date_to": "2023-10-01",
+ "days": [
+ "sunday",
+ "monday"
+ ],
+ "time_from": "11:00",
+ "time_to": "12:00",
+ "time_interval": True
+ },
+ "all_devices": True,
+ "message_ids": ["MSG01", "MSG02"],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "get_schedule coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "get_category_or_message": {"MessageIds": ["MSG01", "MSG02"]},
+ "json_data": {
+ "value": []
+ }
+ },
+ {"message": INVALID_TIME.format("to", "2023-10-32 32:00:00.000"), "success": True,
+ "mparams": {
+ "date_and_time": {
+ "date_from": "2023-10-01",
+ "date_to": "2023-10-32",
+ "days": [
+ "sunday",
+ "monday"
+ ],
+ "time_from": "11:00",
+ "time_to": "32:00",
+ "time_interval": True
+ },
+ "all_devices": True,
+ "message_ids": ["MSG01", "MSG02"],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "get_schedule coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "get_category_or_message": {"MessageIds": ["MSG01", "MSG02"]},
+ "json_data": {
+ "value": []
+ }
+ },
+ {"message": INVALID_TARGETS, "success": True,
+ "mparams": {
+ "all_devices": True,
+ "message_ids": ["MSG01", "MSG02"],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "INVALID_TARGETS coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {},
+ "json_data": {
+ "value": []
+ }
+ },
+ {"message": INVALID_ACTIONS, "success": True,
+ "mparams": {
+ "all_devices": True,
+ "message_ids": ["MSG01", "MSG02"],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "get_schedule coverage",
+ "date_and_time": {
+ "date_from": "2023-10-01",
+ "days": [
+ "sunday",
+ "monday"
+ ],
+ "time_from": "11:00",
+ "time_to": "12:00",
+ "time_interval": True
+ },
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "get_category_or_message": {"MessageIds": ["MSG01", "MSG02"]},
+ "get_actions_payload": {},
+ "json_data": {
+ "value": []
+ }
+ },
+ {"message": CATEGORY_FETCH_FAILED, "success": True,
+ "mparams": {
+ "all_devices": True,
+ "category": [
+ {
+ "catalog_category": [
+ {
+ "category_name": "Audit",
+ "sub_category_names": [
+ "Users",
+ "Generic"
+ ]
+ }
+ ],
+ "catalog_name": "Application"
+ }
+ ],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "get_schedule coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "get_category_data_tree": {},
+ "json_data": {
+ "value": []
+ }
+ },
+ {"message": SUBCAT_IN_CATEGORY.format("General", "Audit"), "success": True,
+ "mparams": {
+ "all_devices": True,
+ "category": [
+ {
+ "catalog_category": [
+ {
+ "category_name": "Audit",
+ "sub_category_names": [
+ "General",
+ "Generic"
+ ]
+ }
+ ],
+ "catalog_name": "Application"
+ }
+ ],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "get_schedule coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "get_category_data_tree": get_category_data_tree,
+ "json_data": {
+ "value": []
+ }
+ },
+ {"message": CATEGORY_IN_CATALOG.format("Audi", "Application"), "success": True,
+ "mparams": {
+ "all_devices": True,
+ "category": [
+ {
+ "catalog_category": [
+ {
+ "category_name": "Audi",
+ "sub_category_names": [
+ "General",
+ "Generic"
+ ]
+ }
+ ],
+ "catalog_name": "Application"
+ }
+ ],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "get_schedule coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "get_category_data_tree": get_category_data_tree,
+ "json_data": {
+ "value": []
+ }
+ },
+ {"message": CATALOG_DIS_EXIST.format("Alpha"), "success": True,
+ "mparams": {
+ "all_devices": True,
+ "category": [
+ {
+ "catalog_name": "Alpha"
+ }
+ ],
+ "state": "present",
+ "name": "Test alert policy",
+ "description": "get_schedule coverage"
+ },
+ "get_alert_policies": [],
+ "get_target_payload": {"Groups": [123, 124]},
+ "get_category_data_tree": get_category_data_tree,
+ "json_data": {
+ "value": []
+ }
+ }
+ ])
+ def test_ome_alert_policies_state_present(self, params, ome_connection_mock_for_alert_policies,
+ ome_response_mock, ome_default_args, module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_connection_mock_for_alert_policies.get_all_items_with_pagination.return_value = params[
+ 'json_data']
+ ome_default_args.update(params['mparams'])
+ mocks = ["get_alert_policies", "validate_ome_data", "get_target_payload",
+ "get_all_actions", "get_severity_payload", "get_category_data_tree",
+ "get_schedule_payload", "get_category_or_message"]
+ for m in mocks:
+ if m in params:
+ mocker.patch(MODULE_PATH + m, return_value=params.get(m, {}))
+ if "create_temp_file" in params:
+ with open(f"{params['mparams'].get('message_file')}", 'w', encoding='utf-8') as fp:
+ fp.write(params["create_temp_file"])
+ result = self._run_module(
+ ome_default_args, check_mode=params.get('check_mode', False))
+ if "create_temp_file" in params:
+ fpath = f"{params['mparams'].get('message_file')}"
+ if os.path.exists(fpath):
+ os.remove(fpath)
+ assert result['msg'] == params['message']
+
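+    # HTTPError is expected to surface as a failed result, URLError as
+    # unreachable, and any other exception type as a plain failure.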
+ @pytest.mark.parametrize("exc_type",
+ [SSLValidationError, ConnectionError, TypeError, ValueError, OSError, HTTPError, URLError])
+ def test_ome_alert_policies_category_info_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_alert_policies,
+ ome_response_mock):
+ json_str = to_text(json.dumps({"data": "out"}))
+ ome_default_args.update({"name": "new alert policy", "enable": True})
+ if exc_type == HTTPError:
+ mocker.patch(MODULE_PATH + 'get_alert_policies', side_effect=exc_type(
+ 'https://testhost.com', 401, 'http error message', {
+ "accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module(ome_default_args)
+ assert result['failed'] is True
+ elif exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'get_alert_policies',
+ side_effect=exc_type("exception message"))
+ result = self._run_module(ome_default_args)
+ assert result['unreachable'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'get_alert_policies',
+ side_effect=exc_type("exception message"))
+ result = self._run_module(ome_default_args)
+ assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_actions_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_actions_info.py
new file mode 100644
index 000000000..a5ebba338
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_actions_info.py
@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_alert_policies_actions_info
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
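+# Fixture that patches RestOME used by ome_alert_policies_actions_info so
+# invoke_request returns the shared ome_response_mock.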
+@pytest.fixture
+def ome_alert_policies_actions_info_mock(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_alert_policies_actions_info.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeAlertPoliciesActionsInfo(FakeAnsibleModule):
+ module = ome_alert_policies_actions_info
+
+ def test_ome_alert_policies_action_info_main_success_case_all(self,
+ ome_alert_policies_actions_info_mock,
+ ome_default_args, ome_response_mock):
+ ome_response_mock.json_data = {"value": [
+ {
+ "Description": "Email",
+ "Disabled": False,
+ "Id": 50,
+ "Name": "Email",
+ "ParameterDetails": [
+ {
+ "Id": 1,
+ "Name": "subject",
+ "TemplateParameterTypeDetails": [
+ {
+ "Name": "maxLength",
+ "Value": "255"
+ }
+ ],
+ "Type": "string",
+ "Value": "Device Name: $name, Device IP Address: $ip, Severity: $severity"
+ }]}]}
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert 'actions' in result
+
+ def test_ome_alert_policies_action_info_empty_case(self, ome_default_args,
+ ome_alert_policies_actions_info_mock,
+ ome_response_mock):
+ ome_response_mock.json_data = {"value": []}
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ result = self._run_module(ome_default_args)
+ assert result['actions'] == []
+
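+    # URLError maps to an unreachable result rather than a failure, so the
+    # failed assertion is skipped for that exception type.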
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError,
+ TypeError, ValueError])
+ def test_ome_alert_policies_action_info_main_exception_handling_case(self, exc_type, ome_default_args,
+ ome_alert_policies_actions_info_mock,
+ ome_response_mock):
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ ome_alert_policies_actions_info_mock.invoke_request.side_effect = exc_type('test')
+ else:
+ ome_alert_policies_actions_info_mock.invoke_request.side_effect = exc_type('https://testhost.com',
+ 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ result = self._run_module(ome_default_args)
+ if exc_type != URLError:
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_category_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_category_info.py
new file mode 100644
index 000000000..b2ff4a7d9
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_category_info.py
@@ -0,0 +1,2670 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_alert_policies_category_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_alert_policies_category_info.'
+SUCCESS_MSG = "Successfully retrieved alert policies category information."
+
+
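+# Fixture that patches RestOME used by ome_alert_policies_category_info so
+# invoke_request returns the shared ome_response_mock.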
+@pytest.fixture
+def ome_connection_mock_for_alert_category(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeAlertCategoryInfo(FakeAnsibleModule):
+ module = ome_alert_policies_category_info
+
+ @pytest.mark.parametrize("params", [
+ {"message": SUCCESS_MSG,
+ "json_data": {
+ "@odata.context": "/api/$metadata#Collection(AlertService.AlertCategories)",
+ "@odata.count": 13,
+ "value": [
+ {
+ "@odata.type": "#AlertService.AlertCategories",
+ "@odata.id": "/api/AlertService/AlertCategories('Application')",
+ "Name": "Application",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 4,
+ "Name": "Audit",
+ "CatalogName": "Application",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 90,
+ "Name": "Devices",
+ "Description": "Devices"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 10,
+ "Name": "Generic",
+ "Description": "Generic"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 151,
+ "Name": "Power Configuration",
+ "Description": "Power Configuration"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 35,
+ "Name": "Users",
+ "Description": "Users"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 5,
+ "Name": "Configuration",
+ "CatalogName": "Application",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 85,
+ "Name": "Application",
+ "Description": "Application"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 116,
+ "Name": "Device Warranty",
+ "Description": "Device Warranty"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 90,
+ "Name": "Devices",
+ "Description": "Devices"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 36,
+ "Name": "Discovery",
+ "Description": "Discovery"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 10,
+ "Name": "Generic",
+ "Description": "Generic"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 84,
+ "Name": "Groups",
+ "Description": "Groups"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 47,
+ "Name": "Job",
+ "Description": "Job"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 118,
+ "Name": "Metrics",
+ "Description": "Metrics"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 20,
+ "Name": "Miscellaneous",
+ "Description": "Miscellaneous"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 93,
+ "Name": "Monitoring",
+ "Description": "Monitoring"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 151,
+ "Name": "Power Configuration",
+ "Description": "Power Configuration"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 31,
+ "Name": "Reports",
+ "Description": "Reports"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 9,
+ "Name": "Security",
+ "Description": "Security"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 88,
+ "Name": "Templates",
+ "Description": "Templates"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 35,
+ "Name": "Users",
+ "Description": "Users"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 7,
+ "Name": "Miscellaneous",
+ "CatalogName": "Application",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 20,
+ "Name": "Miscellaneous",
+ "Description": "Miscellaneous"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 2,
+ "Name": "Storage",
+ "CatalogName": "Application",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 90,
+ "Name": "Devices",
+ "Description": "Devices"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "Application",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 90,
+ "Name": "Devices",
+ "Description": "Devices"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7400,
+ "Name": "Health Status of Managed device",
+ "Description": "Health Status of Managed device"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 47,
+ "Name": "Job",
+ "Description": "Job"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 118,
+ "Name": "Metrics",
+ "Description": "Metrics"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 151,
+ "Name": "Power Configuration",
+ "Description": "Power Configuration"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 3,
+ "Name": "Updates",
+ "CatalogName": "Application",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 85,
+ "Name": "Application",
+ "Description": "Application"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 112,
+ "Name": "Firmware",
+ "Description": "Firmware"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategories",
+ "@odata.id": "/api/AlertService/AlertCategories('Dell%20Storage')",
+ "Name": "Dell Storage",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 2,
+ "Name": "Storage",
+ "CatalogName": "Dell Storage",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7700,
+ "Name": "Other",
+ "Description": "Other"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "Dell Storage",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7700,
+ "Name": "Other",
+ "Description": "Other"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 18,
+ "Name": "Storage",
+ "Description": "Storage"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategories",
+ "@odata.id": "/api/AlertService/AlertCategories('iDRAC')",
+ "Name": "iDRAC",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 4,
+ "Name": "Audit",
+ "CatalogName": "iDRAC",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 41,
+ "Name": "Auto System Reset",
+ "Description": "Auto System Reset"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 54,
+ "Name": "BIOS Management",
+ "Description": "BIOS Management"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 75,
+ "Name": "BIOS POST",
+ "Description": "BIOS POST"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 12,
+ "Name": "Debug",
+ "Description": "Debug"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 53,
+ "Name": "Group Manager",
+ "Description": "Group Manager"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 11,
+ "Name": "Hardware Config",
+ "Description": "Hardware Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 45,
+ "Name": "iDRAC Service Module",
+ "Description": "iDRAC Service Module"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 114,
+ "Name": "IP Address",
+ "Description": "IP Address"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 122,
+ "Name": "iSM PEEK Component",
+ "Description": "iSM PEEK Component"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 48,
+ "Name": "Licensing",
+ "Description": "Licensing"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 15,
+ "Name": "Management Module",
+ "Description": "Management Module"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 96,
+ "Name": "OS Event",
+ "Description": "OS Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7700,
+ "Name": "Other",
+ "Description": "Other"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 81,
+ "Name": "PCI Device",
+ "Description": "PCI Device"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 78,
+ "Name": "Power Supply",
+ "Description": "Power Supply"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 23,
+ "Name": "Power Usage",
+ "Description": "Power Usage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 28,
+ "Name": "Power Usage POW",
+ "Description": "Power Usage POW"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 109,
+ "Name": "RAC Event",
+ "Description": "RAC Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 120,
+ "Name": "Secure Enterprise Key Management",
+ "Description": "Secure Enterprise Key Management"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 25,
+ "Name": "Security Event",
+ "Description": "Security Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 52,
+ "Name": "Software Change",
+ "Description": "Software Change"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 39,
+ "Name": "Software Config",
+ "Description": "Software Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 92,
+ "Name": "Support Assist",
+ "Description": "Support Assist"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 71,
+ "Name": "System Info",
+ "Description": "System Info"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 55,
+ "Name": "UEFI Event",
+ "Description": "UEFI Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 56,
+ "Name": "User Tracking",
+ "Description": "User Tracking"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 5,
+ "Name": "Configuration",
+ "CatalogName": "iDRAC",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 49,
+ "Name": "Auto-Discovery",
+ "Description": "Auto-Discovery"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 107,
+ "Name": "Backup/Restore",
+ "Description": "Backup/Restore"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 54,
+ "Name": "BIOS Management",
+ "Description": "BIOS Management"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 104,
+ "Name": "BOOT Control",
+ "Description": "BOOT Control"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 59,
+ "Name": "Certificate Management",
+ "Description": "Certificate Management"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 51,
+ "Name": "Firmware Download",
+ "Description": "Firmware Download"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 53,
+ "Name": "Group Manager",
+ "Description": "Group Manager"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 11,
+ "Name": "Hardware Config",
+ "Description": "Hardware Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 98,
+ "Name": "IO Identity Optimization",
+ "Description": "IO Identity Optimization"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 105,
+ "Name": "IO Virtualization",
+ "Description": "IO Virtualization"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 114,
+ "Name": "IP Address",
+ "Description": "IP Address"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 27,
+ "Name": "Job Control",
+ "Description": "Job Control"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 57,
+ "Name": "Lifecycle Controller",
+ "Description": "Lifecycle Controller"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 3,
+ "Name": "Link Status",
+ "Description": "Link Status"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 123,
+ "Name": "Liquid Cooling System",
+ "Description": "Liquid Cooling System"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 19,
+ "Name": "Log Event",
+ "Description": "Log Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 15,
+ "Name": "Management Module",
+ "Description": "Management Module"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 80,
+ "Name": "Memory",
+ "Description": "Memory"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 102,
+ "Name": "NIC Configuration",
+ "Description": "NIC Configuration"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 97,
+ "Name": "OS Deployment",
+ "Description": "OS Deployment"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 81,
+ "Name": "PCI Device",
+ "Description": "PCI Device"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 23,
+ "Name": "Power Usage",
+ "Description": "Power Usage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 61,
+ "Name": "Processor",
+ "Description": "Processor"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 109,
+ "Name": "RAC Event",
+ "Description": "RAC Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 120,
+ "Name": "Secure Enterprise Key Management",
+ "Description": "Secure Enterprise Key Management"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 25,
+ "Name": "Security Event",
+ "Description": "Security Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 39,
+ "Name": "Software Config",
+ "Description": "Software Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 18,
+ "Name": "Storage",
+ "Description": "Storage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 113,
+ "Name": "Storage Controller",
+ "Description": "Storage Controller"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 92,
+ "Name": "Support Assist",
+ "Description": "Support Assist"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 29,
+ "Name": "System Event Log",
+ "Description": "System Event Log"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 71,
+ "Name": "System Info",
+ "Description": "System Info"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 79,
+ "Name": "Test Alert",
+ "Description": "Test Alert"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 55,
+ "Name": "UEFI Event",
+ "Description": "UEFI Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 66,
+ "Name": "vFlash Event",
+ "Description": "vFlash Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7,
+ "Name": "Virtual Console",
+ "Description": "Virtual Console"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 2,
+ "Name": "Storage",
+ "CatalogName": "iDRAC",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 108,
+ "Name": "Battery Event",
+ "Description": "Battery Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 106,
+ "Name": "Fan Event",
+ "Description": "Fan Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 11,
+ "Name": "Hardware Config",
+ "Description": "Hardware Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 94,
+ "Name": "Physical Disk",
+ "Description": "Physical Disk"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 78,
+ "Name": "Power Supply",
+ "Description": "Power Supply"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 23,
+ "Name": "Power Usage",
+ "Description": "Power Usage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 109,
+ "Name": "RAC Event",
+ "Description": "RAC Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 44,
+ "Name": "Redundancy",
+ "Description": "Redundancy"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 25,
+ "Name": "Security Event",
+ "Description": "Security Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 52,
+ "Name": "Software Change",
+ "Description": "Software Change"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 119,
+ "Name": "Software Defined Storage",
+ "Description": "Software Defined Storage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 18,
+ "Name": "Storage",
+ "Description": "Storage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 113,
+ "Name": "Storage Controller",
+ "Description": "Storage Controller"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 82,
+ "Name": "Storage Enclosure",
+ "Description": "Storage Enclosure"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 110,
+ "Name": "Temperature",
+ "Description": "Temperature"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 46,
+ "Name": "Virtual Disk",
+ "Description": "Virtual Disk"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "iDRAC",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 67,
+ "Name": "Amperage",
+ "Description": "Amperage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 41,
+ "Name": "Auto System Reset",
+ "Description": "Auto System Reset"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 108,
+ "Name": "Battery Event",
+ "Description": "Battery Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 75,
+ "Name": "BIOS POST",
+ "Description": "BIOS POST"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 89,
+ "Name": "Cable",
+ "Description": "Cable"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 106,
+ "Name": "Fan Event",
+ "Description": "Fan Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 83,
+ "Name": "Fibre Channel",
+ "Description": "Fibre Channel"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 11,
+ "Name": "Hardware Config",
+ "Description": "Hardware Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 45,
+ "Name": "iDRAC Service Module",
+ "Description": "iDRAC Service Module"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 111,
+ "Name": "IDSDM Redundancy",
+ "Description": "IDSDM Redundancy"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 105,
+ "Name": "IO Virtualization",
+ "Description": "IO Virtualization"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 3,
+ "Name": "Link Status",
+ "Description": "Link Status"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 123,
+ "Name": "Liquid Cooling System",
+ "Description": "Liquid Cooling System"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 19,
+ "Name": "Log Event",
+ "Description": "Log Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 15,
+ "Name": "Management Module",
+ "Description": "Management Module"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 80,
+ "Name": "Memory",
+ "Description": "Memory"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 102,
+ "Name": "NIC Configuration",
+ "Description": "NIC Configuration"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 96,
+ "Name": "OS Event",
+ "Description": "OS Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7700,
+ "Name": "Other",
+ "Description": "Other"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 81,
+ "Name": "PCI Device",
+ "Description": "PCI Device"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 94,
+ "Name": "Physical Disk",
+ "Description": "Physical Disk"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 78,
+ "Name": "Power Supply",
+ "Description": "Power Supply"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 23,
+ "Name": "Power Usage",
+ "Description": "Power Usage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 61,
+ "Name": "Processor",
+ "Description": "Processor"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 68,
+ "Name": "Processor Absent",
+ "Description": "Processor Absent"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 103,
+ "Name": "PSU Absent",
+ "Description": "PSU Absent"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 109,
+ "Name": "RAC Event",
+ "Description": "RAC Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 44,
+ "Name": "Redundancy",
+ "Description": "Redundancy"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 120,
+ "Name": "Secure Enterprise Key Management",
+ "Description": "Secure Enterprise Key Management"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 25,
+ "Name": "Security Event",
+ "Description": "Security Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 39,
+ "Name": "Software Config",
+ "Description": "Software Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 18,
+ "Name": "Storage",
+ "Description": "Storage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 92,
+ "Name": "Support Assist",
+ "Description": "Support Assist"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 29,
+ "Name": "System Event Log",
+ "Description": "System Event Log"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 71,
+ "Name": "System Info",
+ "Description": "System Info"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 13,
+ "Name": "System Performance Event",
+ "Description": "System Performance Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 110,
+ "Name": "Temperature",
+ "Description": "Temperature"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 16,
+ "Name": "Temperature Statistics",
+ "Description": "Temperature Statistics"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 55,
+ "Name": "UEFI Event",
+ "Description": "UEFI Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 5,
+ "Name": "vFlash Absent",
+ "Description": "vFlash Absent"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 66,
+ "Name": "vFlash Event",
+ "Description": "vFlash Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7,
+ "Name": "Virtual Console",
+ "Description": "Virtual Console"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 46,
+ "Name": "Virtual Disk",
+ "Description": "Virtual Disk"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 40,
+ "Name": "Voltage",
+ "Description": "Voltage"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 3,
+ "Name": "Updates",
+ "CatalogName": "iDRAC",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 51,
+ "Name": "Firmware Download",
+ "Description": "Firmware Download"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 24,
+ "Name": "Firmware Update Job",
+ "Description": "Firmware Update Job"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 53,
+ "Name": "Group Manager",
+ "Description": "Group Manager"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 27,
+ "Name": "Job Control",
+ "Description": "Job Control"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 109,
+ "Name": "RAC Event",
+ "Description": "RAC Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 52,
+ "Name": "Software Change",
+ "Description": "Software Change"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 71,
+ "Name": "System Info",
+ "Description": "System Info"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 55,
+ "Name": "UEFI Event",
+ "Description": "UEFI Event"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 6,
+ "Name": "Work Notes",
+ "CatalogName": "iDRAC",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 54,
+ "Name": "BIOS Management",
+ "Description": "BIOS Management"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategories",
+ "@odata.id": "/api/AlertService/AlertCategories('IF-MIB')",
+ "Name": "IF-MIB",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 4,
+ "Name": "Audit",
+ "CatalogName": "IF-MIB",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 101,
+ "Name": "Interface",
+ "Description": "Interface"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategories",
+ "@odata.id": "/api/AlertService/AlertCategories('Internal%20Events%20Catalog')",
+ "Name": "Internal Events Catalog",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 4,
+ "Name": "Audit",
+ "CatalogName": "Internal Events Catalog",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 54,
+ "Name": "BIOS Management",
+ "Description": "BIOS Management"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 12,
+ "Name": "Debug",
+ "Description": "Debug"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 90,
+ "Name": "Devices",
+ "Description": "Devices"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 115,
+ "Name": "Fabric",
+ "Description": "Fabric"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 106,
+ "Name": "Fan Event",
+ "Description": "Fan Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 21,
+ "Name": "Feature Card",
+ "Description": "Feature Card"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 10,
+ "Name": "Generic",
+ "Description": "Generic"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 53,
+ "Name": "Group Manager",
+ "Description": "Group Manager"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 11,
+ "Name": "Hardware Config",
+ "Description": "Hardware Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 45,
+ "Name": "iDRAC Service Module",
+ "Description": "iDRAC Service Module"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 101,
+ "Name": "Interface",
+ "Description": "Interface"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 114,
+ "Name": "IP Address",
+ "Description": "IP Address"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 27,
+ "Name": "Job Control",
+ "Description": "Job Control"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 48,
+ "Name": "Licensing",
+ "Description": "Licensing"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 57,
+ "Name": "Lifecycle Controller",
+ "Description": "Lifecycle Controller"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 32,
+ "Name": "Link",
+ "Description": "Link"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 3,
+ "Name": "Link Status",
+ "Description": "Link Status"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 19,
+ "Name": "Log Event",
+ "Description": "Log Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 15,
+ "Name": "Management Module",
+ "Description": "Management Module"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 80,
+ "Name": "Memory",
+ "Description": "Memory"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 77,
+ "Name": "Node",
+ "Description": "Node"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 81,
+ "Name": "PCI Device",
+ "Description": "PCI Device"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 151,
+ "Name": "Power Configuration",
+ "Description": "Power Configuration"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 78,
+ "Name": "Power Supply",
+ "Description": "Power Supply"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 23,
+ "Name": "Power Usage",
+ "Description": "Power Usage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 109,
+ "Name": "RAC Event",
+ "Description": "RAC Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 44,
+ "Name": "Redundancy",
+ "Description": "Redundancy"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 95,
+ "Name": "REST",
+ "Description": "REST"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 9,
+ "Name": "Security",
+ "Description": "Security"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 14,
+ "Name": "Server",
+ "Description": "Server"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 52,
+ "Name": "Software Change",
+ "Description": "Software Change"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 39,
+ "Name": "Software Config",
+ "Description": "Software Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 92,
+ "Name": "Support Assist",
+ "Description": "Support Assist"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 71,
+ "Name": "System Info",
+ "Description": "System Info"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 110,
+ "Name": "Temperature",
+ "Description": "Temperature"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 56,
+ "Name": "User Tracking",
+ "Description": "User Tracking"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 35,
+ "Name": "Users",
+ "Description": "Users"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 50,
+ "Name": "Virtual Media",
+ "Description": "Virtual Media"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 5,
+ "Name": "Configuration",
+ "CatalogName": "Internal Events Catalog",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 49,
+ "Name": "Auto-Discovery",
+ "Description": "Auto-Discovery"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 107,
+ "Name": "Backup/Restore",
+ "Description": "Backup/Restore"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 54,
+ "Name": "BIOS Management",
+ "Description": "BIOS Management"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 104,
+ "Name": "BOOT Control",
+ "Description": "BOOT Control"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 59,
+ "Name": "Certificate Management",
+ "Description": "Certificate Management"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 4,
+ "Name": "Chassis",
+ "Description": "Chassis"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 8,
+ "Name": "Common",
+ "Description": "Common"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 116,
+ "Name": "Device Warranty",
+ "Description": "Device Warranty"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 90,
+ "Name": "Devices",
+ "Description": "Devices"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 34,
+ "Name": "Diagnostics",
+ "Description": "Diagnostics"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 115,
+ "Name": "Fabric",
+ "Description": "Fabric"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 70,
+ "Name": "Fabric NVFA",
+ "Description": "Fabric NVFA"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 106,
+ "Name": "Fan Event",
+ "Description": "Fan Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 83,
+ "Name": "Fibre Channel",
+ "Description": "Fibre Channel"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 51,
+ "Name": "Firmware Download",
+ "Description": "Firmware Download"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 53,
+ "Name": "Group Manager",
+ "Description": "Group Manager"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 84,
+ "Name": "Groups",
+ "Description": "Groups"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 11,
+ "Name": "Hardware Config",
+ "Description": "Hardware Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 86,
+ "Name": "Interface NVIF",
+ "Description": "Interface NVIF"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 98,
+ "Name": "IO Identity Optimization",
+ "Description": "IO Identity Optimization"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 114,
+ "Name": "IP Address",
+ "Description": "IP Address"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 27,
+ "Name": "Job Control",
+ "Description": "Job Control"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 48,
+ "Name": "Licensing",
+ "Description": "Licensing"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 57,
+ "Name": "Lifecycle Controller",
+ "Description": "Lifecycle Controller"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 19,
+ "Name": "Log Event",
+ "Description": "Log Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 15,
+ "Name": "Management Module",
+ "Description": "Management Module"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 37,
+ "Name": "Network",
+ "Description": "Network"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 102,
+ "Name": "NIC Configuration",
+ "Description": "NIC Configuration"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 77,
+ "Name": "Node",
+ "Description": "Node"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 73,
+ "Name": "Node NVNO",
+ "Description": "Node NVNO"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 97,
+ "Name": "OS Deployment",
+ "Description": "OS Deployment"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 1,
+ "Name": "Part Replacement",
+ "Description": "Part Replacement"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 81,
+ "Name": "PCI Device",
+ "Description": "PCI Device"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 151,
+ "Name": "Power Configuration",
+ "Description": "Power Configuration"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 78,
+ "Name": "Power Supply",
+ "Description": "Power Supply"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 23,
+ "Name": "Power Usage",
+ "Description": "Power Usage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 109,
+ "Name": "RAC Event",
+ "Description": "RAC Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 22,
+ "Name": "Remote Service",
+ "Description": "Remote Service"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 95,
+ "Name": "REST",
+ "Description": "REST"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 63,
+ "Name": "SAS IOM",
+ "Description": "SAS IOM"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 9,
+ "Name": "Security",
+ "Description": "Security"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 25,
+ "Name": "Security Event",
+ "Description": "Security Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 30,
+ "Name": "Server Interface",
+ "Description": "Server Interface"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 39,
+ "Name": "Software Config",
+ "Description": "Software Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 18,
+ "Name": "Storage",
+ "Description": "Storage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 6,
+ "Name": "Subscription",
+ "Description": "Subscription"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 92,
+ "Name": "Support Assist",
+ "Description": "Support Assist"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 71,
+ "Name": "System Info",
+ "Description": "System Info"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 88,
+ "Name": "Templates",
+ "Description": "Templates"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 79,
+ "Name": "Test Alert",
+ "Description": "Test Alert"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 43,
+ "Name": "Topology",
+ "Description": "Topology"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 17,
+ "Name": "Topology Graph",
+ "Description": "Topology Graph"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 55,
+ "Name": "UEFI Event",
+ "Description": "UEFI Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 33,
+ "Name": "Uplink",
+ "Description": "Uplink"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 56,
+ "Name": "User Tracking",
+ "Description": "User Tracking"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 35,
+ "Name": "Users",
+ "Description": "Users"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 66,
+ "Name": "vFlash Event",
+ "Description": "vFlash Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 74,
+ "Name": "vFlash Media",
+ "Description": "vFlash Media"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 7,
+ "Name": "Miscellaneous",
+ "CatalogName": "Internal Events Catalog",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 85,
+ "Name": "Application",
+ "Description": "Application"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 2,
+ "Name": "Storage",
+ "CatalogName": "Internal Events Catalog",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 108,
+ "Name": "Battery Event",
+ "Description": "Battery Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 89,
+ "Name": "Cable",
+ "Description": "Cable"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 34,
+ "Name": "Diagnostics",
+ "Description": "Diagnostics"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 106,
+ "Name": "Fan Event",
+ "Description": "Fan Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 100,
+ "Name": "Fluid Cache",
+ "Description": "Fluid Cache"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 11,
+ "Name": "Hardware Config",
+ "Description": "Hardware Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 94,
+ "Name": "Physical Disk",
+ "Description": "Physical Disk"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 78,
+ "Name": "Power Supply",
+ "Description": "Power Supply"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 109,
+ "Name": "RAC Event",
+ "Description": "RAC Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 63,
+ "Name": "SAS IOM",
+ "Description": "SAS IOM"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 25,
+ "Name": "Security Event",
+ "Description": "Security Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 52,
+ "Name": "Software Change",
+ "Description": "Software Change"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 99,
+ "Name": "SSD Devices",
+ "Description": "SSD Devices"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 18,
+ "Name": "Storage",
+ "Description": "Storage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 113,
+ "Name": "Storage Controller",
+ "Description": "Storage Controller"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 82,
+ "Name": "Storage Enclosure",
+ "Description": "Storage Enclosure"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 110,
+ "Name": "Temperature",
+ "Description": "Temperature"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 46,
+ "Name": "Virtual Disk",
+ "Description": "Virtual Disk"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "Internal Events Catalog",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 67,
+ "Name": "Amperage",
+ "Description": "Amperage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 41,
+ "Name": "Auto System Reset",
+ "Description": "Auto System Reset"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 108,
+ "Name": "Battery Event",
+ "Description": "Battery Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 75,
+ "Name": "BIOS POST",
+ "Description": "BIOS POST"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 89,
+ "Name": "Cable",
+ "Description": "Cable"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 69,
+ "Name": "Dell Key Manager",
+ "Description": "Dell Key Manager"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 90,
+ "Name": "Devices",
+ "Description": "Devices"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 34,
+ "Name": "Diagnostics",
+ "Description": "Diagnostics"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 115,
+ "Name": "Fabric",
+ "Description": "Fabric"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 70,
+ "Name": "Fabric NVFA",
+ "Description": "Fabric NVFA"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 106,
+ "Name": "Fan Event",
+ "Description": "Fan Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 83,
+ "Name": "Fibre Channel",
+ "Description": "Fibre Channel"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 64,
+ "Name": "FlexAddress SD",
+ "Description": "FlexAddress SD"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 11,
+ "Name": "Hardware Config",
+ "Description": "Hardware Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 62,
+ "Name": "IDSDM Absent",
+ "Description": "IDSDM Absent"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 65,
+ "Name": "IDSDM Media",
+ "Description": "IDSDM Media"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 111,
+ "Name": "IDSDM Redundancy",
+ "Description": "IDSDM Redundancy"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 3,
+ "Name": "Link Status",
+ "Description": "Link Status"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 19,
+ "Name": "Log Event",
+ "Description": "Log Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 15,
+ "Name": "Management Module",
+ "Description": "Management Module"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 80,
+ "Name": "Memory",
+ "Description": "Memory"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 118,
+ "Name": "Metrics",
+ "Description": "Metrics"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 102,
+ "Name": "NIC Configuration",
+ "Description": "NIC Configuration"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 77,
+ "Name": "Node",
+ "Description": "Node"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 96,
+ "Name": "OS Event",
+ "Description": "OS Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 81,
+ "Name": "PCI Device",
+ "Description": "PCI Device"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 94,
+ "Name": "Physical Disk",
+ "Description": "Physical Disk"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 151,
+ "Name": "Power Configuration",
+ "Description": "Power Configuration"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 78,
+ "Name": "Power Supply",
+ "Description": "Power Supply"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 23,
+ "Name": "Power Usage",
+ "Description": "Power Usage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 61,
+ "Name": "Processor",
+ "Description": "Processor"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 68,
+ "Name": "Processor Absent",
+ "Description": "Processor Absent"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 103,
+ "Name": "PSU Absent",
+ "Description": "PSU Absent"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 109,
+ "Name": "RAC Event",
+ "Description": "RAC Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 44,
+ "Name": "Redundancy",
+ "Description": "Redundancy"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 63,
+ "Name": "SAS IOM",
+ "Description": "SAS IOM"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 25,
+ "Name": "Security Event",
+ "Description": "Security Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 39,
+ "Name": "Software Config",
+ "Description": "Software Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 92,
+ "Name": "Support Assist",
+ "Description": "Support Assist"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 29,
+ "Name": "System Event Log",
+ "Description": "System Event Log"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 71,
+ "Name": "System Info",
+ "Description": "System Info"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 13,
+ "Name": "System Performance Event",
+ "Description": "System Performance Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 110,
+ "Name": "Temperature",
+ "Description": "Temperature"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 16,
+ "Name": "Temperature Statistics",
+ "Description": "Temperature Statistics"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 55,
+ "Name": "UEFI Event",
+ "Description": "UEFI Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 56,
+ "Name": "User Tracking",
+ "Description": "User Tracking"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 5,
+ "Name": "vFlash Absent",
+ "Description": "vFlash Absent"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 66,
+ "Name": "vFlash Event",
+ "Description": "vFlash Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 74,
+ "Name": "vFlash Media",
+ "Description": "vFlash Media"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 40,
+ "Name": "Voltage",
+ "Description": "Voltage"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 3,
+ "Name": "Updates",
+ "CatalogName": "Internal Events Catalog",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 106,
+ "Name": "Fan Event",
+ "Description": "Fan Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 51,
+ "Name": "Firmware Download",
+ "Description": "Firmware Download"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 24,
+ "Name": "Firmware Update Job",
+ "Description": "Firmware Update Job"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 27,
+ "Name": "Job Control",
+ "Description": "Job Control"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 57,
+ "Name": "Lifecycle Controller",
+ "Description": "Lifecycle Controller"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 109,
+ "Name": "RAC Event",
+ "Description": "RAC Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 52,
+ "Name": "Software Change",
+ "Description": "Software Change"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 39,
+ "Name": "Software Config",
+ "Description": "Software Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 71,
+ "Name": "System Info",
+ "Description": "System Info"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 6,
+ "Name": "Work Notes",
+ "CatalogName": "Internal Events Catalog",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 56,
+ "Name": "User Tracking",
+ "Description": "User Tracking"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategories",
+ "@odata.id": "/api/AlertService/AlertCategories('Networking')",
+ "Name": "Networking",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "Networking",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7700,
+ "Name": "Other",
+ "Description": "Other"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategories",
+ "@odata.id": "/api/AlertService/AlertCategories('OMSA')",
+ "Name": "OMSA",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 4,
+ "Name": "Audit",
+ "CatalogName": "OMSA",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 19,
+ "Name": "Log Event",
+ "Description": "Log Event"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 5,
+ "Name": "Configuration",
+ "CatalogName": "OMSA",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 41,
+ "Name": "Auto System Reset",
+ "Description": "Auto System Reset"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 61,
+ "Name": "Processor",
+ "Description": "Processor"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 25,
+ "Name": "Security Event",
+ "Description": "Security Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 71,
+ "Name": "System Info",
+ "Description": "System Info"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "OMSA",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 67,
+ "Name": "Amperage",
+ "Description": "Amperage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 41,
+ "Name": "Auto System Reset",
+ "Description": "Auto System Reset"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 108,
+ "Name": "Battery Event",
+ "Description": "Battery Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 89,
+ "Name": "Cable",
+ "Description": "Cable"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 11,
+ "Name": "Hardware Config",
+ "Description": "Hardware Config"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 80,
+ "Name": "Memory",
+ "Description": "Memory"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7700,
+ "Name": "Other",
+ "Description": "Other"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 78,
+ "Name": "Power Supply",
+ "Description": "Power Supply"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 23,
+ "Name": "Power Usage",
+ "Description": "Power Usage"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 61,
+ "Name": "Processor",
+ "Description": "Processor"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 44,
+ "Name": "Redundancy",
+ "Description": "Redundancy"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 25,
+ "Name": "Security Event",
+ "Description": "Security Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 29,
+ "Name": "System Event Log",
+ "Description": "System Event Log"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 71,
+ "Name": "System Info",
+ "Description": "System Info"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 110,
+ "Name": "Temperature",
+ "Description": "Temperature"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 66,
+ "Name": "vFlash Event",
+ "Description": "vFlash Event"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 40,
+ "Name": "Voltage",
+ "Description": "Voltage"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategories",
+ "@odata.id": "/api/AlertService/AlertCategories('OpenManage%20Enterprise')",
+ "Name": "OpenManage Enterprise",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "OpenManage Enterprise",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7400,
+ "Name": "Health Status of Managed device",
+ "Description": "Health Status of Managed device"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 118,
+ "Name": "Metrics",
+ "Description": "Metrics"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 71,
+ "Name": "System Info",
+ "Description": "System Info"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategories",
+ "@odata.id": "/api/AlertService/AlertCategories('OpenManage%20Essentials')",
+ "Name": "OpenManage Essentials",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "OpenManage Essentials",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7400,
+ "Name": "Health Status of Managed device",
+ "Description": "Health Status of Managed device"
+ },
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7700,
+ "Name": "Other",
+ "Description": "Other"
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 6,
+ "Name": "Work Notes",
+ "CatalogName": "OpenManage Essentials",
+ "SubCategoryDetails": []
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategories",
+ "@odata.id": "/api/AlertService/AlertCategories('Power%20Manager')",
+ "Name": "Power Manager",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "Power Manager",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 151,
+ "Name": "Power Configuration",
+ "Description": "Power Configuration"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategories",
+ "@odata.id": "/api/AlertService/AlertCategories('RFC1215')",
+ "Name": "RFC1215",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "RFC1215",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7700,
+ "Name": "Other",
+ "Description": "Other"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategories",
+ "@odata.id": "/api/AlertService/AlertCategories('SNMPv2-MIB')",
+ "Name": "SNMPv2-MIB",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "SNMPv2-MIB",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7700,
+ "Name": "Other",
+ "Description": "Other"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "@odata.type": "#AlertService.AlertCategories",
+ "@odata.id": "/api/AlertService/AlertCategories('VMWare')",
+ "Name": "VMWare",
+ "IsBuiltIn": True,
+ "CategoriesDetails": [
+ {
+ "@odata.type": "#AlertService.AlertCategory",
+ "Id": 1,
+ "Name": "System Health",
+ "CatalogName": "VMWare",
+ "SubCategoryDetails": [
+ {
+ "@odata.type": "#AlertService.AlertSubCategory",
+ "Id": 7700,
+ "Name": "Other",
+ "Description": "Other"
+ }
+ ]
+ }
+ ]
+ }
+ ]}}])
+ def test_ome_alert_policies_category_info(self, params, ome_connection_mock_for_alert_category, ome_response_mock,
+ ome_default_args, module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ result = self._run_module(
+ ome_default_args, check_mode=params.get('check_mode', False))
+ assert isinstance(result['categories'], list)
+ assert result['msg'] == params['message']
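+ # The module is expected to strip OData metadata keys from every category entry,
+ # so each item should still carry CategoriesDetails but no '@odata.' keys.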
+ for ctr in result['categories']:
+ assert 'CategoriesDetails' in ctr
+ for k in ctr.keys():
+ assert '@odata.' not in k
+
+ @pytest.mark.parametrize("exc_type",
+ [SSLValidationError, ConnectionError, TypeError, ValueError, OSError, HTTPError, URLError])
+ def test_ome_alert_policies_category_info_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_alert_category,
+ ome_response_mock):
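+ # URLError should be reported back as 'unreachable'; every other exception type should fail the module.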
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type == HTTPError:
+ mocker.patch(MODULE_PATH + 'get_all_data_with_pagination', side_effect=exc_type(
+ 'https://testhost.com', 401, 'http error message', {
+ "accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module(ome_default_args)
+ assert result['failed'] is True
+ elif exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'get_all_data_with_pagination',
+ side_effect=exc_type("exception message"))
+ result = self._run_module(ome_default_args)
+ assert result['unreachable'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'get_all_data_with_pagination',
+ side_effect=exc_type("exception message"))
+ result = self._run_module(ome_default_args)
+ assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_info.py
new file mode 100644
index 000000000..425bc1faa
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_info.py
@@ -0,0 +1,121 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_alert_policies_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+MODULE_SUCCESS_MESSAGE_ALL = "Successfully retrieved all the OME alert policies information."
+MODULE_SUCCESS_MESSAGE_SPECIFIC = "Successfully retrieved {0} OME alert policy information."
+POLICY_NAME_NOT_FOUND_OR_EMPTY = "The OME alert policy name {0} provided does not exist or empty."
+
+
+class TestOmeAlertPolicyInfo(FakeAnsibleModule):
+ """Pyest class for ome_alert_policies_info module."""
+ module = ome_alert_policies_info
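+ # Canned response payload with two sample alert policies, reused across the test cases below.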
+ resp_mock_value = {"@odata.context": "/api/$metadata#Collection(JobService.Job)",
+ "@odata.count": 1,
+ "value": [
+ {
+ "Id": 10006,
+ "Name": "TestAlert1",
+ "Description": "This policy is applicable to critical alerts.",
+ "State": True,
+ "Visible": True,
+ "Owner": None,
+ },
+ {
+ "Id": 10010,
+ "Name": "TestAlert2",
+ "Description": "This policy is applicable to critical alerts.",
+ "State": True,
+ "Visible": True,
+ "Owner": None,
+ }
+ ]}
+
+ @pytest.fixture
+ def ome_connection_alert_policy_info_mock(self, mocker, ome_response_mock):
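+ # Patch RestOME so invoke_request() returns the prepared ome_response_mock
+ # instead of contacting a live OME appliance.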
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_alert_policies_info.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+ def test_all_ome_alert_policy_info_success_case(self, ome_default_args, ome_connection_alert_policy_info_mock,
+ ome_response_mock):
+ ome_response_mock.json_data = self.resp_mock_value
+ ome_response_mock.success = True
+ result = self._run_module(ome_default_args)
+ assert result['policies'][0]["Id"] == 10006
+ assert "@odata.count" not in result['policies'][0]
+ assert result['msg'] == MODULE_SUCCESS_MESSAGE_ALL
+
+ def test_policy_name_ome_alert_policy_info_success_case(self, ome_default_args, ome_connection_alert_policy_info_mock,
+ ome_response_mock):
+ policy_name = 'TestAlert2'
+ ome_default_args.update({"policy_name": policy_name})
+ ome_response_mock.json_data = self.resp_mock_value
+ ome_response_mock.success = True
+ result = self._run_module(ome_default_args)
+ assert result['policies'][0]["Id"] == 10010
+ assert "@odata.count" not in result['policies'][0]
+ assert result['msg'] == MODULE_SUCCESS_MESSAGE_SPECIFIC.format(policy_name)
+
+ def test_random_policy_name_ome_alert_policy_info(self, ome_default_args, ome_connection_alert_policy_info_mock,
+ ome_response_mock):
+ random_name = 'Random'
+ ome_default_args.update({"policy_name": random_name})
+ ome_response_mock.json_data = self.resp_mock_value
+ ome_response_mock.success = True
+ result = self._run_module(ome_default_args)
+ assert result['policies'] == []
+ assert result['msg'] == POLICY_NAME_NOT_FOUND_OR_EMPTY.format(random_name)
+
+ def test_empty_policy_name_ome_alert_policy_info(self, ome_default_args, ome_connection_alert_policy_info_mock,
+ ome_response_mock):
+ empty_name = ""
+ ome_default_args.update({"policy_name": empty_name})
+ ome_response_mock.json_data = self.resp_mock_value
+ ome_response_mock.success = True
+ result = self._run_module(ome_default_args)
+ assert result['policies'] == []
+ assert result['msg'] == POLICY_NAME_NOT_FOUND_OR_EMPTY.format(empty_name)
+
+ @pytest.mark.parametrize("exc_type", [URLError, HTTPError, SSLValidationError, ConnectionError,
+ TypeError, ValueError])
+ def test_ome_alert_policy_info_main_exception_case(self, exc_type, mocker, ome_default_args, ome_connection_alert_policy_info_mock,
+ ome_response_mock):
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
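+ # HTTPError (and SSLValidationError, grouped with it here) is raised with the full
+ # urllib-style arguments; the remaining exception types take a plain message.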
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(
+ MODULE_PATH + 'ome_alert_policies_info.OMEAlertPolicyInfo.get_alert_policy_info',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(
+ MODULE_PATH + 'ome_alert_policies_info.OMEAlertPolicyInfo.get_alert_policy_info',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module(ome_default_args)
+ if exc_type != URLError:
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_message_id_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_message_id_info.py
new file mode 100644
index 000000000..758bbfaaf
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_alert_policies_message_id_info.py
@@ -0,0 +1,84 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_alert_policies_message_id_info
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from io import StringIO
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def ome_alert_policies_message_id_info_mock(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'ome_alert_policies_message_id_info.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeAlertPoliciesMessageIDInfo(FakeAnsibleModule):
+ module = ome_alert_policies_message_id_info
+
+ def test_alert_policies_message_id_info_success_case(self, ome_default_args, ome_alert_policies_message_id_info_mock, ome_response_mock):
+ ome_response_mock.json_data = {"value": [
+ {
+ "Category": "System Health",
+ "Message": "The ${0} sensor has failed, and the last recorded value by the sensor was ${1} A.",
+ "MessageId": "AMP400",
+ "Prefix": "AMP",
+ "SequenceNo": 400,
+ "Severity": "Critical",
+ "SubCategory": "Amperage"
+ }
+ ]}
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert 'message_ids' in result
+ assert result['msg'] == "Successfully retrieved alert policies message ids information."
+
+ def test_ome_alert_policies_message_id_info_empty_case(self, ome_default_args,
+ ome_alert_policies_message_id_info_mock,
+ ome_response_mock):
+ ome_response_mock.json_data = {"value": []}
+ ome_response_mock.status_code = 200
+ ome_response_mock.success = True
+ result = self._run_module(ome_default_args)
+ assert result['message_ids'] == []
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError,
+ TypeError, ValueError])
+ def test_ome_alert_policies_message_id_info_main_exception_handling_case(self, exc_type, ome_default_args,
+ ome_alert_policies_message_id_info_mock,
+ ome_response_mock):
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ ome_alert_policies_message_id_info_mock.invoke_request.side_effect = exc_type('test')
+ else:
+ ome_alert_policies_message_id_info_mock.invoke_request.side_effect = exc_type('https://testhost.com',
+ 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ result = self._run_module(ome_default_args)
+ if exc_type != URLError:
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_smtp.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_smtp.py
index b5bc1d947..f30a6a049 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_smtp.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_smtp.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 4.3.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -18,10 +18,9 @@ from io import StringIO
import pytest
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
-from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils.urls import SSLValidationError
from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_alerts_smtp
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \
- AnsibleFailJSonException
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
SUCCESS_MSG = "Successfully updated the SMTP settings."
SMTP_URL = "AlertService/AlertDestinations/SMTPConfiguration"
@@ -451,7 +450,7 @@ class TestAppAlertsSMTP(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'fetch_smtp_settings',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_syslog.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_syslog.py
index ea4551d93..4ae3922a0 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_syslog.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_alerts_syslog.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 4.3.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -45,25 +45,25 @@ class TestOmeAlertSyslog(FakeAnsibleModule):
{"module_args": {
"syslog_servers": [
{
- "destination_address": "192.168.10.41",
+ "destination_address": "XX.XX.XX.XX",
"enabled": True,
"id": 1,
"port_number": 514
},
{
- "destination_address": "192.168.10.46",
+ "destination_address": "XY.XY.XY.XY",
"enabled": False,
"id": 2,
"port_number": 514
},
{
- "destination_address": "192.168.10.43",
+ "destination_address": "YY.YY.YY.YY",
"enabled": False,
"id": 3,
"port_number": 514
},
{
- "destination_address": "192.168.10.44",
+ "destination_address": "ZZ.ZZ.ZZ.ZZ",
"enabled": True,
"id": 4,
"port_number": 514
@@ -77,28 +77,28 @@ class TestOmeAlertSyslog(FakeAnsibleModule):
"@odata.type": "#AlertDestinations.SyslogConfiguration",
"Id": 1,
"Enabled": True,
- "DestinationAddress": "192.168.10.41",
+ "DestinationAddress": "XX.XX.XX.XX",
"PortNumber": 514
},
{
"@odata.type": "#AlertDestinations.SyslogConfiguration",
"Id": 2,
"Enabled": False,
- "DestinationAddress": "192.168.10.46",
+ "DestinationAddress": "XY.XY.XY.XY",
"PortNumber": 0
},
{
"@odata.type": "#AlertDestinations.SyslogConfiguration",
"Id": 3,
"Enabled": False,
- "DestinationAddress": "192.168.10.43",
+ "DestinationAddress": "YY.YY.YY.YY",
"PortNumber": 514
},
{
"@odata.type": "#AlertDestinations.SyslogConfiguration",
"Id": 4,
"Enabled": True,
- "DestinationAddress": "192.168.10.44",
+ "DestinationAddress": "ZZ.ZZ.ZZ.ZZ",
"PortNumber": 514
}
]
@@ -106,13 +106,13 @@ class TestOmeAlertSyslog(FakeAnsibleModule):
{"module_args": {
"syslog_servers": [
{
- "destination_address": "192.168.10.41",
+ "destination_address": "XX.XX.XX.XX",
"enabled": True,
"id": 1,
"port_number": 514
},
{
- "destination_address": "192.168.10.46",
+ "destination_address": "XY.XY.XY.XY",
"enabled": False,
"id": 2,
"port_number": 514
@@ -126,14 +126,14 @@ class TestOmeAlertSyslog(FakeAnsibleModule):
"@odata.type": "#AlertDestinations.SyslogConfiguration",
"Id": 1,
"Enabled": True,
- "DestinationAddress": "192.168.10.41",
+ "DestinationAddress": "XX.XX.XX.XX",
"PortNumber": 511
},
{
"@odata.type": "#AlertDestinations.SyslogConfiguration",
"Id": 2,
"Enabled": True,
- "DestinationAddress": "192.168.10.46",
+ "DestinationAddress": "XY.XY.XY.XY",
"PortNumber": 514
}
]
@@ -141,13 +141,13 @@ class TestOmeAlertSyslog(FakeAnsibleModule):
{"check_mode": True, "module_args": {
"syslog_servers": [
{
- "destination_address": "192.168.10.41",
+ "destination_address": "XX.XX.XX.XX",
"enabled": True,
"id": 1,
"port_number": 514
},
{
- "destination_address": "192.168.10.46",
+ "destination_address": "XY.XY.XY.XY",
"enabled": False,
"id": 2,
"port_number": 514
@@ -161,14 +161,14 @@ class TestOmeAlertSyslog(FakeAnsibleModule):
"@odata.type": "#AlertDestinations.SyslogConfiguration",
"Id": 1,
"Enabled": True,
- "DestinationAddress": "192.168.10.41",
+ "DestinationAddress": "XX.XX.XX.XX",
"PortNumber": 511
},
{
"@odata.type": "#AlertDestinations.SyslogConfiguration",
"Id": 2,
"Enabled": True,
- "DestinationAddress": "192.168.10.46",
+ "DestinationAddress": "XY.XY.XY.XY",
"PortNumber": 514
}
]
@@ -179,31 +179,31 @@ class TestOmeAlertSyslog(FakeAnsibleModule):
{"module_args": {
"syslog_servers": [
{
- "destination_address": "192.168.10.41",
+ "destination_address": "XX.XX.XX.XX",
"enabled": True,
"id": 1,
"port_number": 514
},
{
- "destination_address": "192.168.10.46",
+ "destination_address": "XY.XY.XY.XY",
"enabled": False,
"id": 2,
"port_number": 514
},
{
- "destination_address": "192.168.10.43",
+ "destination_address": "YY.YY.YY.YY",
"enabled": False,
"id": 3,
"port_number": 514
},
{
- "destination_address": "192.168.10.44",
+ "destination_address": "ZZ.ZZ.ZZ.ZZ",
"enabled": True,
"id": 4,
"port_number": 514
},
{
- "destination_address": "192.168.10.44",
+ "destination_address": "ZZ.ZZ.ZZ.ZZ",
"enabled": True,
"id": 4,
"port_number": 514
@@ -241,7 +241,7 @@ class TestOmeAlertSyslog(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'validate_input',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py
index c31983bca..99c49c210 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 2.1.3
-# Copyright (C) 2019-2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.1.0
+# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -21,9 +21,10 @@ from io import StringIO
from ansible.module_utils._text import to_text
from ssl import SSLError
from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_certificate
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+EMAIL_ADDRESS = "support@dell.com"
@pytest.fixture
@@ -47,7 +48,7 @@ class TestOmeAppCSR(FakeAnsibleModule):
args = {"command": "generate_csr", "distinguished_name": "hostname.com",
"department_name": "Remote Access Group", "business_name": "Dell Inc.",
"locality": "Round Rock", "country_state": "Texas", "country": "US",
- "email": "support@dell.com"}
+ "email": EMAIL_ADDRESS, "subject_alternative_names": "XX.XX.XX.XX"}
ome_default_args.update(args)
if exc_type == URLError:
mocker.patch(MODULE_PATH + 'ome_application_certificate.get_resource_parameters',
@@ -61,7 +62,7 @@ class TestOmeAppCSR(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'ome_application_certificate.get_resource_parameters',
- side_effect=exc_type('http://testhost.com', 400,
+ side_effect=exc_type('https://testhost.com', 400,
'http error message',
{"accept-type": "application/json"},
StringIO(json_str)))
@@ -76,14 +77,15 @@ class TestOmeAppCSR(FakeAnsibleModule):
args = {"command": "generate_csr", "distinguished_name": "hostname.com",
"department_name": "Remote Access Group", "business_name": "Dell Inc.",
"locality": "Round Rock", "country_state": "Texas", "country": "US",
- "email": "support@dell.com"}
+ "email": EMAIL_ADDRESS, "subject_alternative_names": "XX.XX.XX.XX"}
f_module = self.get_module_mock(params=args)
result = self.module.get_resource_parameters(f_module)
assert result[0] == "POST"
assert result[1] == "ApplicationService/Actions/ApplicationService.GenerateCSR"
assert result[2] == {'DistinguishedName': 'hostname.com', 'Locality': 'Round Rock',
'DepartmentName': 'Remote Access Group', 'BusinessName': 'Dell Inc.',
- 'State': 'Texas', 'Country': 'US', 'Email': 'support@dell.com'}
+ 'State': 'Texas', 'Country': 'US', 'Email': 'support@dell.com',
+ 'San': 'XX.XX.XX.XX'}
def test_upload_csr_fail01(self, mocker, ome_default_args, ome_connection_mock_for_application_certificate,
ome_response_mock):
@@ -108,13 +110,13 @@ class TestOmeAppCSR(FakeAnsibleModule):
csr_json = {"CertificateData": "--BEGIN-REQUEST--"}
payload = {"DistinguishedName": "hostname.com", "DepartmentName": "Remote Access Group",
"BusinessName": "Dell Inc.", "Locality": "Round Rock", "State": "Texas",
- "Country": "US", "Email": "support@dell.com"}
+ "Country": "US", "Email": EMAIL_ADDRESS, "subject_alternative_names": "XX.XX.XX.XX"}
mocker.patch(MODULE_PATH + 'ome_application_certificate.get_resource_parameters',
return_value=("POST", "ApplicationService/Actions/ApplicationService.GenerateCSR", payload))
ome_default_args.update({"command": "generate_csr", "distinguished_name": "hostname.com",
"department_name": "Remote Access Group", "business_name": "Dell Inc.",
"locality": "Round Rock", "country_state": "Texas", "country": "US",
- "email": "support@dell.com"})
+ "email": EMAIL_ADDRESS, "subject_alternative_names": "XX.XX.XX.XX, YY.YY.YY.YY"})
ome_response_mock.success = True
ome_response_mock.json_data = csr_json
result = self.execute_module(ome_default_args)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py
index 3a86a3f0d..627c5e71d 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -18,11 +18,9 @@ from io import StringIO
import pytest
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
-from ssl import SSLError
-from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils.urls import SSLValidationError
from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_console_preferences
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \
- AnsibleFailJSonException
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
SUCCESS_MSG = "Successfully updated the Console Preferences settings."
SETTINGS_URL = "ApplicationService/Settings"
@@ -2233,7 +2231,7 @@ class TestOmeAppConsolePreferences(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + '_validate_params',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_address.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_address.py
index 3938184ed..01cf4afdd 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_address.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_address.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.1.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -19,7 +19,7 @@ from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_network_address
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
@@ -351,7 +351,7 @@ class TestOmeAppNetwork(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'ome_application_network_address.validate_input',
- side_effect=exc_type('http://testhost.com', 400,
+ side_effect=exc_type('https://testhost.com', 400,
'http error message',
{"accept-type": "application/json"},
StringIO(json_str)))
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_proxy.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_proxy.py
index f4d32fcd3..af34a6652 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_proxy.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_proxy.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.0.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -21,7 +21,7 @@ from io import StringIO
from ansible.module_utils._text import to_text
from ssl import SSLError
from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_network_proxy
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
CHECK_MODE_CHANGE_FOUND_MSG = "Changes found to be applied."
@@ -128,7 +128,7 @@ class TestOmeTemplate(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'ome_application_network_proxy.get_payload',
- side_effect=exc_type('http://testhost.com', 400,
+ side_effect=exc_type('https://testhost.com', 400,
'http error message',
{"accept-type": "application/json"},
StringIO(json_str)))
@@ -160,7 +160,7 @@ class TestOmeTemplate(FakeAnsibleModule):
def test_get_payload(self, ome_default_args):
new_param = {
- "ip_address": "192.168.0.2",
+ "ip_address": "YY.YY.YY.YY",
"proxy_port": 443,
"enable_proxy": True,
"proxy_username": "username",
@@ -171,18 +171,18 @@ class TestOmeTemplate(FakeAnsibleModule):
ome_default_args.update(new_param)
f_module = self.get_module_mock(params=ome_default_args)
payload = self.module.get_payload(f_module)
- assert ome_default_args == {"ip_address": "192.168.0.2",
+ assert ome_default_args == {"ip_address": "YY.YY.YY.YY",
"proxy_port": 443,
"enable_proxy": True,
"proxy_username": "username",
"proxy_password": "password",
"enable_authentication": False,
- "hostname": "192.168.0.1",
+ "hostname": "XX.XX.XX.XX",
"username": "username",
"password": "password",
"port": 443,
"ca_path": "/path/ca_bundle"}
- assert payload == {"EnableProxy": True, "IpAddress": "192.168.0.2", "PortNumber": 443, "Username": "username",
+ assert payload == {"EnableProxy": True, "IpAddress": "YY.YY.YY.YY", "PortNumber": 443, "Username": "username",
"Password": "password", "EnableAuthentication": False}
def test_get_updated_payload_success_case(self, mocker, ome_default_args, ome_connection_mock_for_application_network_proxy,
@@ -192,7 +192,7 @@ class TestOmeTemplate(FakeAnsibleModule):
"@odata.id": "/api/ApplicationService/Network/ProxyConfiguration", "IpAddress": "255.0.0.0",
"PortNumber": 443, "EnableAuthentication": False, "EnableProxy": True,
"Username": "username1", "Password": "password1"}
- payload = {"EnableAuthentication": True, "IpAddress": "192.168.0.1", "PortNumber": 443, 'EnableProxy': True,
+ payload = {"EnableAuthentication": True, "IpAddress": "XX.XX.XX.XX", "PortNumber": 443, 'EnableProxy': True,
'Username': 'username2', "Password": "password2"}
f_module = self.get_module_mock(params=ome_default_args)
ome_response_mock.json_data = current_setting
@@ -212,14 +212,14 @@ class TestOmeTemplate(FakeAnsibleModule):
"@odata.id": "/api/ApplicationService/Network/ProxyConfiguration", "IpAddress": "255.0.0.0",
"PortNumber": 443, "EnableAuthentication": True, "EnableProxy": True,
"Username": "username1", "Password": "password1"}
- payload = {"EnableAuthentication": False, "IpAddress": "192.168.0.1", "PortNumber": 443, 'EnableProxy': True,
+ payload = {"EnableAuthentication": False, "IpAddress": "XX.XX.XX.XX", "PortNumber": 443, 'EnableProxy': True,
'Username': 'username2', "Password": "password2"}
f_module = self.get_module_mock(params=ome_default_args)
ome_response_mock.json_data = current_setting
mocker.patch(MODULE_PATH + "ome_application_network_proxy.validate_check_mode_for_network_proxy",
return_value=None)
setting = self.module.get_updated_payload(ome_connection_mock_for_application_network_proxy, f_module, payload)
- assert setting == {"EnableAuthentication": False, "IpAddress": "192.168.0.1", "PortNumber": 443,
+ assert setting == {"EnableAuthentication": False, "IpAddress": "XX.XX.XX.XX", "PortNumber": 443,
'EnableProxy': True}
def test_get_updated_payload_when_same_setting_failure_case1(self, mocker, ome_default_args,
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_settings.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_settings.py
index 0cd91a7f5..7a4ec5354 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_settings.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_settings.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 4.4.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -16,12 +16,11 @@ import json
import pytest
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
-from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils.urls import SSLValidationError
from io import StringIO
from ansible.module_utils._text import to_text
-from ssl import SSLError
from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_network_settings
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
SUCCESS_MSG = "Successfully updated the session timeout settings."
NO_CHANGES = "No changes found to be applied."
@@ -375,7 +374,7 @@ class TestOmeApplicationNetworkSettings(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'fetch_session_inactivity_settings',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_time.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_time.py
index 53e323117..b5b7de549 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_time.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_time.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.0.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -21,7 +21,7 @@ from io import StringIO
from ansible.module_utils._text import to_text
from ssl import SSLError
from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_network_time
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
@@ -72,13 +72,13 @@ class TestOmeTemplate(FakeAnsibleModule):
assert result["msg"] == "Successfully configured network time."
@pytest.mark.parametrize("param1", [{"enable_ntp": True, "time_zone": "TZ_ID_66"}])
- @pytest.mark.parametrize("param2", [{"primary_ntp_address": "192.168.0.2"},
- {"secondary_ntp_address1": "192.168.0.3"},
- {"secondary_ntp_address2": "192.168.0.4"},
- {"primary_ntp_address": "192.168.0.2", "secondary_ntp_address1": "192.168.0.3"},
- {"primary_ntp_address": "192.168.0.2", "secondary_ntp_address2": "192.168.0.4"},
- {"primary_ntp_address": "192.168.0.2", "secondary_ntp_address1": "192.168.0.3",
- "secondary_ntp_address2": "192.168.0.4"}
+ @pytest.mark.parametrize("param2", [{"primary_ntp_address": "YY.YY.YY.YY"},
+ {"secondary_ntp_address1": "XX.XX.XX.XX"},
+ {"secondary_ntp_address2": "XY.XY.XY.XY"},
+ {"primary_ntp_address": "YY.YY.YY.YY", "secondary_ntp_address1": "XX.XX.XX.XX"},
+ {"primary_ntp_address": "YY.YY.YY.YY", "secondary_ntp_address2": "XY.XY.XY.XY"},
+ {"primary_ntp_address": "YY.YY.YY.YY", "secondary_ntp_address1": "XX.XX.XX.XX",
+ "secondary_ntp_address2": "XY.XY.XY.XY"}
])
def test_ome_application_network_time_main_enable_ntp_true_success_case_01(self, mocker, ome_default_args, param1,
param2,
@@ -93,9 +93,9 @@ class TestOmeTemplate(FakeAnsibleModule):
time_data = {
"EnableNTP": True,
"JobId": None,
- "PrimaryNTPAddress": "192.168.0.2",
- "SecondaryNTPAddress1": "192.168.0.3",
- "SecondaryNTPAddress2": "192.168.0.4",
+ "PrimaryNTPAddress": "YY.YY.YY.YY",
+ "SecondaryNTPAddress1": "XX.XX.XX.XX",
+ "SecondaryNTPAddress2": "XY.XY.XY.XY",
"SystemTime": None,
"TimeSource": "10.136.112.222",
"TimeZone": "TZ_ID_66",
@@ -196,7 +196,7 @@ class TestOmeTemplate(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'ome_application_network_time.get_payload',
- side_effect=exc_type('http://testhost.com', 400,
+ side_effect=exc_type('https://testhost.com', 400,
'http error message',
{"accept-type": "application/json"},
StringIO(json_str)))
@@ -210,9 +210,9 @@ class TestOmeTemplate(FakeAnsibleModule):
new_param = {
"enable_ntp": True,
"time_zone": "TimeZone",
- "primary_ntp_address": "192.168.0.2",
- "secondary_ntp_address1": "192.168.0.3",
- "secondary_ntp_address2": "192.168.0.4"
+ "primary_ntp_address": "YY.YY.YY.YY",
+ "secondary_ntp_address1": "XX.XX.XX.XX",
+ "secondary_ntp_address2": "XY.XY.XY.XY"
}
ome_default_args.update(new_param)
self.module.remove_unwanted_keys(removable_keys, ome_default_args)
@@ -268,7 +268,7 @@ class TestOmeTemplate(FakeAnsibleModule):
"secondary_ntp_address2": "10.136.112.222",
"system_time": None,
"time_zone": "TZ_ID_66",
- "hostname": "192.168.0.1",
+ "hostname": "XX.XX.XX.XX",
"username": "username",
"password": "password",
"ca_path": "/path/ca_bundle"}
@@ -464,13 +464,13 @@ class TestOmeTemplate(FakeAnsibleModule):
assert exc.value.args[0] == msg
@pytest.mark.parametrize("sub_param", [
- {"primary_ntp_address": "192.168.02.1", "secondary_ntp_address1": "192.168.02.3",
- "secondary_ntp_address2": "192.168.02.2"},
- {"secondary_ntp_address1": "192.168.02.1"},
- {"secondary_ntp_address2": "192.168.02.1"},
- {"primary_ntp_address": "192.168.02.1", "time_zone": "TZ_01"},
- {"primary_ntp_address": "192.168.02.1"},
- {"secondary_ntp_address1": "192.168.02.1", "time_zone": "TZ_01"},
+ {"primary_ntp_address": "XX.XX.XX.XX", "secondary_ntp_address1": "ZZ.ZZ.ZZ.ZZ",
+ "secondary_ntp_address2": "YY.YY.YY.YY"},
+ {"secondary_ntp_address1": "XX.XX.XX.XX"},
+ {"secondary_ntp_address2": "XX.XX.XX.XX"},
+ {"primary_ntp_address": "XX.XX.XX.XX", "time_zone": "TZ_01"},
+ {"primary_ntp_address": "XX.XX.XX.XX"},
+ {"secondary_ntp_address1": "XX.XX.XX.XX", "time_zone": "TZ_01"},
])
def test_validate_input_time_enable_false_case_01(self, ome_default_args, sub_param):
params = {"enable_ntp": False}
@@ -482,10 +482,10 @@ class TestOmeTemplate(FakeAnsibleModule):
self.module.validate_input(f_module)
assert exc.value.args[0] == msg
- @pytest.mark.parametrize("sub_param", [{"time_zone": "TZ_01"}, {"primary_ntp_address": "192.168.02.1"},
- {"secondary_ntp_address1": "192.168.02.1"},
- {"secondary_ntp_address2": "192.168.02.1"},
- {"primary_ntp_address": "192.168.02.1", "time_zone": "TZ_01"}, {}
+ @pytest.mark.parametrize("sub_param", [{"time_zone": "TZ_01"}, {"primary_ntp_address": "XX.XX.XX.XX"},
+ {"secondary_ntp_address1": "XX.XX.XX.XX"},
+ {"secondary_ntp_address2": "XX.XX.XX.XX"},
+ {"primary_ntp_address": "XX.XX.XX.XX", "time_zone": "TZ_01"}, {}
])
def test_validate_input_time_enable_true_case_04(self, ome_default_args, sub_param):
"""
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_webserver.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_webserver.py
index d6fbc3680..d5792ce30 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_webserver.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_network_webserver.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 2.1.3
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2019-2020 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -20,7 +20,7 @@ from ansible.module_utils.urls import ConnectionError, SSLValidationError
from io import StringIO
from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_application_network_webserver
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
@@ -133,7 +133,7 @@ class TestOmeAppNetwork(FakeAnsibleModule):
else:
mocker.patch(
MODULE_PATH + 'ome_application_network_webserver.get_updated_payload',
- side_effect=exc_type('http://testhost.com', 400,
+ side_effect=exc_type('https://testhost.com', 400,
'http error message',
{"accept-type": "application/json"},
StringIO(json_str)))
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_security_settings.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_security_settings.py
index ef945ae63..e0ba31825 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_security_settings.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_security_settings.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 4.4.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -350,6 +350,7 @@ class TestOmeSecuritySettings(FakeAnsibleModule):
ome_default_args.update(params['module_args'])
ome_connection_mock_for_security_settings.job_tracking.return_value = \
(params.get('job_failed'), params.get('job_message'))
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
result = self._run_module(
ome_default_args, check_mode=params.get(
'check_mode', False))
@@ -390,7 +391,7 @@ class TestOmeSecuritySettings(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'login_security_setting',
- side_effect=exc_type('http://testhost.com',
+ side_effect=exc_type('https://testhost.com',
400,
'http error message',
{"accept-type": "application/json"},
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_chassis_slots.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_chassis_slots.py
index 0d3504b14..10841d435 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_chassis_slots.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_chassis_slots.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 3.6.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -20,7 +20,7 @@ from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_chassis_slots
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
DEVICE_REPEATED = "Duplicate device entry found for devices with identifiers {0}."
INVALID_SLOT_DEVICE = "Unable to rename one or more slots because either the specified device is invalid or slots " \
@@ -290,7 +290,7 @@ class TestOmeChassisSlots(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'get_device_slot_config',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_baseline.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_baseline.py
index 51ff166f0..370f53246 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_baseline.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_baseline.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 3.2.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -562,7 +562,7 @@ class TestOmeConfigCompBaseline(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'compliance_operation',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_info.py
index b038b1191..d743ed53d 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_info.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_configuration_compliance_info.py
@@ -2,8 +2,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 6.1.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 8.2.0
+# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -12,16 +12,15 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import pytest
import json
-from ssl import SSLError
-from ansible_collections.dellemc.openmanage.plugins.modules import ome_configuration_compliance_info
-from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
-from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \
- AnsibleFailJSonException
from io import StringIO
+
+import pytest
from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_configuration_compliance_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_configuration_compliance_info.'
@@ -31,18 +30,53 @@ def ome_connection_mock_for_compliance_info(mocker, ome_response_mock):
connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
- ome_connection_mock_obj.get_all_report_details.return_value = {"report_list": []}
- ome_connection_mock_obj.get_all_items_with_pagination.return_value = {"value": []}
+ ome_connection_mock_obj.get_all_report_details.return_value = {
+ "report_list": []}
+ ome_connection_mock_obj.get_all_items_with_pagination.return_value = {
+ "value": []}
return ome_connection_mock_obj
class TestBaselineComplianceInfo(FakeAnsibleModule):
module = ome_configuration_compliance_info
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"report_list": [
+ {'Name': 'b1', 'Id': 123,
+ 'TemplateId': 23},
+ {'Name': 'b2', 'Id': 124,
+ 'TemplateId': 24}],
+ 'ComplianceAttributeGroups': [{"Device": "Compliant"}]},
+ 'report': [{'Device': 'Compliant'}],
+ 'mparams': {"baseline": "b1", "device_id": 1234}},
+ {"json_data": {"report_list": [
+ {'Name': 'b1', 'Id': 123, 'TemplateId': 23},
+ {'Name': 'b2', 'Id': 124, 'TemplateId': 24}],
+ 'value': [{'Id': 123, 'ServiceTag': 'ABCD123'},
+ {'Id': 124, 'ServiceTag': 'ABCD124'}],
+ 'ComplianceAttributeGroups': [{"Device": "Compliant"}]},
+ 'report': [{'ComplianceAttributeGroups': [{'Device': 'Compliant'}], 'Id': 123, 'ServiceTag': 'ABCD123'}],
+ 'mparams': {"baseline": "b1", "device_service_tag": 'ABCD123'}}
+ ])
+ def test_ome_configuration_compliance_info_success(self, params, ome_connection_mock_for_compliance_info, ome_response_mock,
+ ome_default_args, module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_connection_mock_for_compliance_info.get_all_report_details.return_value = params[
+ 'json_data']
+ ome_connection_mock_for_compliance_info.get_all_items_with_pagination.return_value = params[
+ 'json_data']
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(
+ ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['compliance_info'] == params['report']
+
def test_validate_device(self, ome_connection_mock_for_compliance_info):
value_list = [{"Id": 25011, "ServiceTag": "FGHREF"}]
- report = ome_connection_mock_for_compliance_info.get_all_items_with_pagination.return_value = {"value": value_list}
- f_module = self.get_module_mock(params={'baseline': "baseline_one", "device_id": 25011})
+ report = ome_connection_mock_for_compliance_info.get_all_items_with_pagination.return_value = {
+ "value": value_list}
+ f_module = self.get_module_mock(
+ params={'baseline': "baseline_one", "device_id": 25011})
device = self.module.validate_device(f_module, report,
device_id=25011, service_tag=None, base_id=None)
service_tag = self.module.validate_device(f_module, report,
@@ -57,31 +91,53 @@ class TestBaselineComplianceInfo(FakeAnsibleModule):
def test_get_baseline_id(self, ome_connection_mock_for_compliance_info):
report_list = [{"Id": 1, "Name": "baseline_one", "TemplateId": 1}]
- ome_connection_mock_for_compliance_info.get_all_report_details.return_value = {"report_list": report_list}
+ ome_connection_mock_for_compliance_info.get_all_report_details.return_value = {
+ "report_list": report_list}
f_module = self.get_module_mock(params={'baseline': "baseline_one"})
- base_id, template_id = self.module.get_baseline_id(f_module, "baseline_one", ome_connection_mock_for_compliance_info)
+ base_id, template_id = self.module.get_baseline_id(
+ f_module, "baseline_one", ome_connection_mock_for_compliance_info)
with pytest.raises(Exception) as exc:
- self.module.get_baseline_id(f_module, "baseline_two", ome_connection_mock_for_compliance_info)
+ self.module.get_baseline_id(
+ f_module, "baseline_two", ome_connection_mock_for_compliance_info)
assert exc.value.args[0] == "Unable to complete the operation because the entered " \
"target baseline name 'baseline_two' is invalid."
assert base_id == 1
def test_compliance_report(self, ome_connection_mock_for_compliance_info, mocker, ome_response_mock):
value_list = [{"Id": 25011, "TemplateId": 1}]
- ome_connection_mock_for_compliance_info.get_all_items_with_pagination.return_value = {"value": value_list}
+ ome_connection_mock_for_compliance_info.get_all_items_with_pagination.return_value = {
+ "value": value_list}
mocker.patch(MODULE_PATH + "get_baseline_id", return_value=25011)
f_module = self.get_module_mock(params={'baseline': "baseline_one"})
- ome_response_mock.json_data = {"value": [{"Id": 25011, "TemplateId": 1}]}
+ ome_response_mock.json_data = {
+ "value": [{"Id": 25011, "TemplateId": 1}]}
mocker.patch(MODULE_PATH + 'get_baseline_id', return_value=(1, 1))
- report = self.module.compliance_report(f_module, ome_connection_mock_for_compliance_info)
- assert report == [{'Id': 25011, 'ComplianceAttributeGroups': None, 'TemplateId': 1}]
+ report = self.module.compliance_report(
+ f_module, ome_connection_mock_for_compliance_info)
+ assert report == [
+ {'Id': 25011, 'ComplianceAttributeGroups': None, 'TemplateId': 1}]
- def test_main_exception(self, ome_connection_mock_for_compliance_info, mocker,
+ @pytest.mark.parametrize("exc_type",
+ [SSLValidationError, ConnectionError, TypeError, ValueError, OSError, HTTPError, URLError])
+ def test_main_exception(self, exc_type, ome_connection_mock_for_compliance_info, mocker,
ome_response_mock, ome_default_args):
- ome_default_args.update({"baseline": "baseline_one", "device_id": 25011})
- response = mocker.patch(MODULE_PATH + 'compliance_report')
- ome_response_mock.status_code = 200
- ome_response_mock.success = True
- ome_response_mock.json_data = {"report": "compliance_report"}
- report = self._run_module(ome_default_args)
- assert report["changed"] is False
+ ome_default_args.update(
+ {"baseline": "baseline_one", "device_id": 25011})
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type == HTTPError:
+ mocker.patch(MODULE_PATH + 'compliance_report', side_effect=exc_type(
+ 'https://testhost.com', 401, 'http error message', {
+ "accept-type": "application/json"},
+ StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ elif exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'compliance_report',
+ side_effect=exc_type("exception message"))
+ result = self._run_module(ome_default_args)
+ assert result['unreachable'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'compliance_report',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_group.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_group.py
index f92a0abe5..e3f832c59 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_group.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_group.py
@@ -17,8 +17,7 @@ import json
from ssl import SSLError
from io import StringIO
from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_group
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \
- AnsibleFailJSonException
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils._text import to_text
@@ -32,7 +31,6 @@ INVALID_IP_FORMAT = "The format {0} of the IP address provided is not supported
IP_NOT_EXISTS = "The IP addresses provided do not exist in OpenManage Enterprise."
try:
from netaddr import IPAddress, IPNetwork, IPRange
- from netaddr.core import AddrFormatError
HAS_NETADDR = True
except ImportError:
@@ -67,7 +65,7 @@ class TestOMEDeviceGroup(FakeAnsibleModule):
def test_ome_device_group_get_group_id_case02(self, ome_connection_mock_for_device_group, ome_response_mock):
f_module = self.get_module_mock(params={"group_id": 1234,
"device_ids": [25011], "device_service_tags": []})
- ome_connection_mock_for_device_group.invoke_request.side_effect = HTTPError('http://testhost.com', 400,
+ ome_connection_mock_for_device_group.invoke_request.side_effect = HTTPError('https://testhost.com', 400,
'http error message',
{"accept-type": "application/json"},
StringIO(to_text(json.dumps(
@@ -195,7 +193,7 @@ class TestOMEDeviceGroup(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'get_group_id',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_info.py
index bb41b51a3..d9bb6e82d 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_info.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_info.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.1.0
-# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.1.0
+# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -23,6 +23,7 @@ resource_detailed_inventory = {"detailed_inventory:": {"device_id": {Constants.d
Constants.device_id2: Constants.service_tag1}}}
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+HTTPS_ADDRESS = 'https://testhost.com'
class TestOmeDeviceInfo(FakeAnsibleModule):
@@ -61,13 +62,17 @@ class TestOmeDeviceInfo(FakeAnsibleModule):
validate_device_inputs_mock, ome_connection_mock,
get_device_resource_parameters_mock, ome_response_mock):
quer_param_mock = mocker.patch(MODULE_PATH + 'ome_device_info._get_query_parameters')
- quer_param_mock.return_value = {"filter": "Type eq '1000'"}
- ome_response_mock.json_data = {"value": [{"device_id1": "details", "device_id2": "details"}]}
+ quer_param_mock.return_value = {"filter": "Type eq 1000"}
+ ome_response_mock.json_data = {
+ "value": [{"device_id1": "details", "device_id2": "details"}],
+ "@odata.context": "/api/$metadata#Collection(DeviceService.Device)",
+ "@odata.count": 2,
+ }
ome_response_mock.status_code = 200
result = self._run_module(ome_default_args)
assert result['changed'] is False
assert 'device_info' in result
- assert result["device_info"] == {"value": [{"device_id1": "details", "device_id2": "details"}]}
+ assert "@odata.context" in result["device_info"]
def test_main_basic_inventory_failure_case(self, ome_default_args, module_mock, validate_device_inputs_mock,
ome_connection_mock,
@@ -108,14 +113,14 @@ class TestOmeDeviceInfo(FakeAnsibleModule):
"device_id": {Constants.device_id1: "DeviceService/Devices(Constants.device_id1)/InventoryDetails"},
"device_service_tag": {Constants.service_tag1: "DeviceService/Devices(4321)/InventoryDetails"}}}
get_device_resource_parameters_mock.return_value = detailed_inventory
- ome_connection_mock.invoke_request.side_effect = HTTPError('http://testhost.com', 400, '', {}, None)
+ ome_connection_mock.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 400, '', {}, None)
result = self._run_module(ome_default_args)
assert 'device_info' in result
def test_main_HTTPError_error_case(self, ome_default_args, module_mock, validate_device_inputs_mock,
ome_connection_mock,
get_device_resource_parameters_mock, ome_response_mock):
- ome_connection_mock.invoke_request.side_effect = HTTPError('http://testhost.com', 400, '', {}, None)
+ ome_connection_mock.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 400, '', {}, None)
ome_response_mock.json_data = {"value": [{"device_id1": "details", "device_id2": "details"}]}
ome_response_mock.status_code = 400
result = self._run_module(ome_default_args)
@@ -197,7 +202,7 @@ class TestOmeDeviceInfo(FakeAnsibleModule):
self.module._get_device_id_from_service_tags([Constants.service_tag1, "INVALID"], ome_connection_mock)
def test_get_device_id_from_service_tags_error_case(self, ome_connection_mock, ome_response_mock):
- ome_connection_mock.get_all_report_details.side_effect = HTTPError('http://testhost.com', 400, '', {}, None)
+ ome_connection_mock.get_all_report_details.side_effect = HTTPError(HTTPS_ADDRESS, 400, '', {}, None)
with pytest.raises(HTTPError) as ex:
self.module._get_device_id_from_service_tags(["INVALID"], ome_connection_mock)
@@ -224,7 +229,7 @@ class TestOmeDeviceInfo(FakeAnsibleModule):
error_msg = '400: Bad Request'
service_tag_dict = {}
non_available_tags = [Constants.service_tag2]
- ome_connection_mock.invoke_request.side_effect = HTTPError('http://testhost.com', 400, error_msg, {}, None)
+ ome_connection_mock.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 400, error_msg, {}, None)
with pytest.raises(HTTPError, match=error_msg) as ex:
self.module.update_device_details_with_filtering(non_available_tags, service_tag_dict, ome_connection_mock)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py
index 23bae781c..9b92bb3c2 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.1.0
+# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -13,18 +13,32 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
-import pytest
-from ssl import SSLError
from io import StringIO
+from ssl import SSLError
+
+import pytest
+from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_local_access_configuration
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_local_access_configuration.'
+CONFIG_FAIL_MSG = "one of the following is required: enable_kvm_access, enable_chassis_direct_access, " \
+ "chassis_power_button, quick_sync, lcd"
+DOMAIN_FAIL_MSG = "The operation to configure the local access is supported only on " \
+ "OpenManage Enterprise Modular."
+FETCH_FAIL_MSG = "Unable to retrieve the device information."
+DEVICE_FAIL_MSG = "Unable to complete the operation because the entered target device {0} '{1}' is invalid."
+LAC_FAIL_MSG = "Unable to complete the operation because the local access configuration settings " \
+ "are not supported on the specified device."
+CHANGES_FOUND = "Changes found to be applied."
+NO_CHANGES_FOUND = "No changes found to be applied."
+SUCCESS_MSG = "Successfully updated the local access settings."
+HTTPS_ADDRESS = 'https://testhost.com'
+HTTP_ERROR_MSG = 'http error message'
+
@pytest.fixture
def ome_conn_mock_lac(mocker, ome_response_mock):
@@ -35,32 +49,214 @@ def ome_conn_mock_lac(mocker, ome_response_mock):
class TestOMEMDevicePower(FakeAnsibleModule):
-
module = ome_device_local_access_configuration
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceServiceTag': 'ABCD123', "Type": 1000},
+ {'PublicAddress': "YY.YY.YY.YY", 'DeviceId': 1235, "Type": 1000}],
+ "SettingType": "LocalAccessConfiguration", "EnableChassisDirect": False,
+ "EnableChassisPowerButton": False, "EnableKvmAccess": True, "EnableLcdOverridePin": False,
+ "LcdAccess": "VIEW_ONLY", "LcdCustomString": "LCD Text", "LcdLanguage": "en",
+ "LcdPresence": "Present", "LcdOverridePin": "123456",
+ "QuickSync": {"QuickSyncAccess": True, "TimeoutLimit": 10, "EnableInactivityTimeout": True,
+ "TimeoutLimitUnit": "MINUTES", "EnableReadAuthentication": True,
+ "EnableQuickSyncWifi": True, "QuickSyncHardware": "Present"}},
+ 'message': "Successfully updated the local access settings.",
+ 'mparams': {"hostname": "XX.XX.XX.XX",
+ "device_service_tag": 'ABCD123',
+ 'enable_kvm_access': True, 'enable_chassis_direct_access': False,
+ 'chassis_power_button':
+ {'enable_chassis_power_button': False, 'enable_lcd_override_pin': True,
+ 'disabled_button_lcd_override_pin': "123456"
+ },
+ 'lcd':
+ {'lcd_access': 'VIEW_AND_MODIFY',
+ 'user_defined': 'LCD Text', 'lcd_language': 'en'},
+ 'quick_sync': {'enable_quick_sync_wifi': True, 'enable_inactivity_timeout': True,
+ 'timeout_limit': 10, 'timeout_limit_unit': 'MINUTES',
+ 'enable_read_authentication': True,
+ 'quick_sync_access': 'READ_WRITE'}
+ }},
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "dummyhostname_shouldnotexist",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "YY.YY.YY.YY", 'DeviceId': 1235, "Type": 1000}],
+ "SettingType": "LocalAccessConfiguration", "EnableChassisDirect": False,
+ "EnableChassisPowerButton": False, "EnableKvmAccess": True, "EnableLcdOverridePin": False,
+ "LcdAccess": "VIEW_ONLY", "LcdCustomString": "LCD Text", "LcdLanguage": "en",
+ "LcdPresence": "Present", "LcdOverridePin": "123456",
+ "QuickSync": {"QuickSyncAccess": True, "TimeoutLimit": 10, "EnableInactivityTimeout": True,
+ "TimeoutLimitUnit": "MINUTES", "EnableReadAuthentication": True,
+ "EnableQuickSyncWifi": True, "QuickSyncHardware": "Present"}},
+ 'message': "Successfully updated the local access settings.",
+ 'mparams': {"hostname": "dummyhostname_shouldnotexist",
+ 'enable_kvm_access': True, 'enable_chassis_direct_access': False,
+ 'chassis_power_button':
+ {'enable_chassis_power_button': False, 'enable_lcd_override_pin': True,
+ 'disabled_button_lcd_override_pin': "123456"
+ },
+ 'lcd':
+ {'lcd_access': 'VIEW_AND_MODIFY',
+ 'user_defined': 'LCD Text', 'lcd_language': 'en'},
+ 'quick_sync': {'enable_quick_sync_wifi': True, 'enable_inactivity_timeout': True,
+ 'timeout_limit': 10, 'timeout_limit_unit': 'MINUTES',
+ 'enable_read_authentication': True,
+ 'quick_sync_access': 'READ_WRITE'}
+ }}
+ ])
+ def test_ome_devices_lac_success(self, params, ome_conn_mock_lac, ome_response_mock,
+ ome_default_args, module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(
+ ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "YY.YY.YY.YY", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': DOMAIN_FAIL_MSG,
+ 'http_error_json': {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "MessageId": "CGEN1006",
+ "RelatedProperties": [],
+ "Message": "Unable to process the request because an error occurred.",
+ "MessageArgs": [],
+ "Severity": "Critical",
+ "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
+ }
+ ]
+ }},
+ 'mparams': {"hostname": "XX.XX.XX.XX",
+ "device_service_tag": 'ABCD123',
+ 'enable_kvm_access': True, 'enable_chassis_direct_access': False,
+ 'chassis_power_button':
+ {'enable_chassis_power_button': False, 'enable_lcd_override_pin': True,
+ 'disabled_button_lcd_override_pin': "123456"
+ },
+ 'lcd':
+ {'lcd_access': 'VIEW_AND_MODIFY',
+ 'user_defined': 'LCD Text', 'lcd_language': 'en'},
+ 'quick_sync': {'enable_quick_sync_wifi': True, 'enable_inactivity_timeout': True,
+ 'timeout_limit': 10, 'timeout_limit_unit': 'MINUTES',
+ 'enable_read_authentication': True,
+ 'quick_sync_access': 'READ_WRITE'}
+ }},
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "YY.YY.YY.YY", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': LAC_FAIL_MSG,
+ 'http_error_json': {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "MessageId": "CGEN1004",
+ "RelatedProperties": [],
+ "Message": "Unable to process the request because an error occurred.",
+ "MessageArgs": [],
+ "Severity": "Critical",
+ "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
+ }
+ ]
+ }},
+ 'check_domain_service': 'mocked_check_domain_service',
+ 'get_chassis_device': ('Id', 1234),
+ 'mparams': {"hostname": "XX.XX.XX.XX",
+ 'enable_kvm_access': True, 'enable_chassis_direct_access': False,
+ 'chassis_power_button':
+ {'enable_chassis_power_button': False, 'enable_lcd_override_pin': True,
+ 'disabled_button_lcd_override_pin': "123456"
+ },
+ 'lcd':
+ {'lcd_access': 'VIEW_AND_MODIFY',
+ 'user_defined': 'LCD Text', 'lcd_language': 'en'},
+ 'quick_sync': {'enable_quick_sync_wifi': True, 'enable_inactivity_timeout': True,
+ 'timeout_limit': 10, 'timeout_limit_unit': 'MINUTES',
+ 'enable_read_authentication': True,
+ 'quick_sync_access': 'READ_WRITE'}
+ }},
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "YY.YY.YY.YY", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': "Unable to complete the operation because the entered target device id '123' is invalid.",
+ 'mparams': {"hostname": "XX.XX.XX.XX", "device_id": 123,
+ 'enable_kvm_access': True, 'enable_chassis_direct_access': False,
+ 'chassis_power_button':
+ {'enable_chassis_power_button': False, 'enable_lcd_override_pin': True,
+ 'disabled_button_lcd_override_pin': "123456"
+ },
+ 'lcd':
+ {'lcd_access': 'VIEW_AND_MODIFY',
+ 'user_defined': 'LCD Text', 'lcd_language': 'en'},
+ 'quick_sync': {'enable_quick_sync_wifi': True, 'enable_inactivity_timeout': True,
+ 'timeout_limit': 10, 'timeout_limit_unit': 'MINUTES',
+ 'enable_read_authentication': True,
+ 'quick_sync_access': 'READ_WRITE'}
+ }},
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "YY.YY.YY.YY", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': CONFIG_FAIL_MSG,
+ 'mparams': {"hostname": "XX.XX.XX.XX", "device_id": 123}}
+ ])
+ def test_ome_devices_lac_failure(self, params, ome_conn_mock_lac, ome_response_mock,
+ ome_default_args, module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ mocks = ["check_domain_service", 'get_chassis_device']
+ for m in mocks:
+ if m in params:
+ mocker.patch(MODULE_PATH + m, return_value=params.get(m, {}))
+ if 'http_error_json' in params:
+ json_str = to_text(json.dumps(params.get('http_error_json', {})))
+ ome_conn_mock_lac.invoke_request.side_effect = HTTPError(
+ HTTPS_ADDRESS, 401, HTTP_ERROR_MSG, {
+ "accept-type": "application/json"},
+ StringIO(json_str))
+ ome_default_args.update(params['mparams'])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == params['message']
+
def test_check_domain_service(self, ome_conn_mock_lac, ome_default_args):
f_module = self.get_module_mock()
result = self.module.check_domain_service(f_module, ome_conn_mock_lac)
assert result is None
def test_get_chassis_device(self, ome_conn_mock_lac, ome_default_args, mocker, ome_response_mock):
- mocker.patch(MODULE_PATH + "get_ip_from_host", return_value="192.18.1.1")
+ mocker.patch(MODULE_PATH + "get_ip_from_host",
+ return_value="X.X.X.X")
ome_response_mock.json_data = {"value": [{"DeviceId": 25011, "DomainRoleTypeValue": "LEAD",
- "PublicAddress": ["192.168.1.1"]},
+ "PublicAddress": ["XX.XX.XX.XX"]},
{"DeviceId": 25012, "DomainRoleTypeValue": "STANDALONE",
- "PublicAddress": ["192.168.1.2"]}]}
- param = {"device_id": 25012, "hostname": "192.168.1.6", "enable_kvm_access": True}
+ "PublicAddress": ["YY.YY.YY.YY"]}]}
+ param = {"device_id": 25012, "hostname": "XX.XX.XX.XX",
+ "enable_kvm_access": True}
f_module = self.get_module_mock(params=param)
with pytest.raises(Exception) as err:
self.module.get_chassis_device(f_module, ome_conn_mock_lac)
assert err.value.args[0] == "Unable to retrieve the device information."
def test_get_ip_from_host(self, ome_conn_mock_lac, ome_default_args, ome_response_mock):
- result = self.module.get_ip_from_host("192.168.0.1")
- assert result == "192.168.0.1"
+ result = self.module.get_ip_from_host("XX.XX.XX.XX")
+ assert result == "XX.XX.XX.XX"
def test_get_device_details(self, ome_conn_mock_lac, ome_default_args, ome_response_mock, mocker):
- param = {"device_id": 25012, "hostname": "192.168.1.6", "enable_kvm_access": True}
+ param = {"device_id": 25012, "hostname": "XX.XX.XX.XX",
+ "enable_kvm_access": True}
f_module = self.get_module_mock(params=param)
ome_response_mock.status_code = 200
ome_response_mock.success = True
@@ -72,26 +268,31 @@ class TestOMEMDevicePower(FakeAnsibleModule):
self.module.get_device_details(ome_conn_mock_lac, f_module)
assert err.value.args[0] == "Unable to complete the operation because the entered target " \
"device id '25012' is invalid."
- param = {"device_id": 25012, "hostname": "192.168.1.6", "enable_kvm_access": True}
+ param = {"device_id": 25012, "hostname": "XX.XX.XX.XX",
+ "enable_kvm_access": True}
f_module = self.get_module_mock(params=param)
- ome_response_mock.json_data = {"value": [{"Id": 25012, "DeviceServiceTag": "GHRT2RL"}], "EnableKvmAccess": True}
- mocker.patch(MODULE_PATH + 'check_mode_validation', return_value={"EnableKvmAccess": True})
+ ome_response_mock.json_data = {"value": [
+ {"Id": 25012, "DeviceServiceTag": "GHRT2RL"}], "EnableKvmAccess": True}
+ mocker.patch(MODULE_PATH + 'check_mode_validation',
+ return_value={"EnableKvmAccess": True})
resp = self.module.get_device_details(ome_conn_mock_lac, f_module)
assert resp.json_data["EnableKvmAccess"] is True
- param = {"hostname": "192.168.1.6", "enable_kvm_access": True}
+ param = {"hostname": "XX.XX.XX.XX", "enable_kvm_access": True}
f_module = self.get_module_mock(params=param)
- mocker.patch(MODULE_PATH + 'get_chassis_device', return_value=("Id", 25011))
+ mocker.patch(MODULE_PATH + 'get_chassis_device',
+ return_value=("Id", 25011))
resp = self.module.get_device_details(ome_conn_mock_lac, f_module)
assert resp.json_data["EnableKvmAccess"] is True
def test_check_mode_validation(self, ome_conn_mock_lac, ome_default_args, ome_response_mock, mocker):
loc_data = {"EnableKvmAccess": True, "EnableChassisDirect": True, "EnableChassisPowerButton": True,
"EnableLcdOverridePin": True, "LcdAccess": True, "LcdCustomString": "LCD Text",
- "LcdLanguage": "en", "LcdOverridePin": 123456, "LcdPresence": "Present",
+ "LcdLanguage": "en", "LcdOverridePin": "123456", "LcdPresence": "Present",
"QuickSync": {"QuickSyncAccess": True, "TimeoutLimit": 10, "EnableInactivityTimeout": True,
"TimeoutLimitUnit": "MINUTES", "EnableReadAuthentication": True,
"EnableQuickSyncWifi": True, "QuickSyncHardware": "Present"}, }
- param = {"device_id": 25012, "hostname": "192.168.1.6", "enable_kvm_access": True}
+ param = {"device_id": 25012, "hostname": "XX.XX.XX.XX",
+ "enable_kvm_access": True}
f_module = self.get_module_mock(params=param)
with pytest.raises(Exception) as err:
self.module.check_mode_validation(f_module, loc_data)
@@ -100,7 +301,8 @@ class TestOMEMDevicePower(FakeAnsibleModule):
with pytest.raises(Exception) as err:
self.module.check_mode_validation(f_module, loc_data)
assert err.value.args[0] == "No changes found to be applied."
- param = {"device_id": 25012, "hostname": "192.168.1.6", "enable_kvm_access": False}
+ param = {"device_id": 25012, "hostname": "XX.XX.XX.XX",
+ "enable_kvm_access": False}
f_module = self.get_module_mock(params=param)
f_module.check_mode = True
with pytest.raises(Exception) as err:
@@ -114,21 +316,30 @@ class TestOMEMDevicePower(FakeAnsibleModule):
[IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
def test_ome_device_power_main_exception_case(self, exc_type, mocker, ome_default_args,
ome_conn_mock_lac, ome_response_mock):
- ome_default_args.update({"device_id": 25011, "enable_kvm_access": True})
+ ome_default_args.update(
+ {"device_id": 25011, "enable_kvm_access": True})
ome_response_mock.status_code = 400
ome_response_mock.success = False
json_str = to_text(json.dumps({"info": "error_details"}))
if exc_type == URLError:
- mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("url open error"))
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type("url open error"))
result = self._run_module(ome_default_args)
assert result["unreachable"] is True
elif exc_type not in [HTTPError, SSLValidationError]:
- mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("exception message"))
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type("exception message"))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
+ elif exc_type in [HTTPError]:
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type(HTTPS_ADDRESS, 400, HTTP_ERROR_MSG,
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module(ome_default_args)
+ assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_domain_service',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type(HTTPS_ADDRESS, 400, HTTP_ERROR_MSG,
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py
index 8133e0167..40fe1b1a2 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 4.3.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -13,16 +13,22 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
-import pytest
-from ssl import SSLError
from io import StringIO
+from ssl import SSLError
+
+import pytest
+from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_location
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_location.'
+PARAM_DATA_CENTER = "data center 1"
+PARAM_ROOM = "room 1"
+PARAM_AISLE = "aisle 1"
+PARAM_RACK = "rack 1"
+PARAM_LOCATION = "location 1"
@pytest.fixture
@@ -34,96 +40,227 @@ def ome_conn_mock_location(mocker, ome_response_mock):
class TestOMEMDeviceLocation(FakeAnsibleModule):
-
module = ome_device_location
def test_check_domain_service(self, ome_conn_mock_location, ome_default_args, mocker):
f_module = self.get_module_mock()
- result = self.module.check_domain_service(f_module, ome_conn_mock_location)
+ result = self.module.check_domain_service(
+ f_module, ome_conn_mock_location)
assert result is None
def test_standalone_chassis(self, ome_conn_mock_location, ome_default_args, mocker, ome_response_mock):
- mocker.patch(MODULE_PATH + "get_ip_from_host", return_value="192.18.1.1")
+ mocker.patch(MODULE_PATH + "get_ip_from_host",
+ return_value="X.X.X.X")
ome_response_mock.json_data = {"value": [{"DeviceId": 25011, "DomainRoleTypeValue": "LEAD",
- "PublicAddress": ["192.168.1.1"]},
+ "PublicAddress": ["XX.XX.XX.XX"]},
{"DeviceId": 25012, "DomainRoleTypeValue": "STANDALONE",
- "PublicAddress": ["192.168.1.2"]}]}
+ "PublicAddress": ["YY.YY.YY.YY"]}]}
- param = {"data_center": "data center 1", "rack_slot": 2, "device_id": 25012, "hostname": "192.168.1.6",
- "room": "room 1", "aisle": "aisle 1", "rack": "rack 1", "location": "location 1"}
+ param = {"data_center": PARAM_DATA_CENTER, "rack_slot": 2, "device_id": 25012, "hostname": "XY.XY.XY.XY",
+ "room": PARAM_ROOM, "aisle": PARAM_AISLE, "rack": PARAM_RACK, "location": PARAM_LOCATION}
f_module = self.get_module_mock(params=param)
with pytest.raises(Exception) as err:
self.module.standalone_chassis(f_module, ome_conn_mock_location)
assert err.value.args[0] == "Failed to fetch the device information."
def test_validate_dictionary(self, ome_conn_mock_location, ome_default_args, mocker):
- param = {"data_center": "data center 1", "rack_slot": 2,
- "room": "room 1", "aisle": "aisle 1", "rack": "rack 1", "location": "location 1"}
+ param = {"data_center": PARAM_DATA_CENTER, "rack_slot": 2,
+ "room": PARAM_ROOM, "aisle": PARAM_AISLE, "rack": PARAM_RACK, "location": PARAM_LOCATION}
f_module = self.get_module_mock(params=param)
f_module.check_mode = True
- loc_resp = {"DataCenter": "data center 1", "RackSlot": 2, "Room": "room 1",
- "Aisle": "aisle 1", "RackName": "rack 1", "Location": "location 1"}
+ loc_resp = {"DataCenter": PARAM_DATA_CENTER, "RackSlot": 2, "Room": PARAM_ROOM,
+ "Aisle": PARAM_AISLE, "RackName": PARAM_RACK, "Location": PARAM_LOCATION}
with pytest.raises(Exception) as err:
self.module.validate_dictionary(f_module, loc_resp)
- loc_resp = {"DataCenter": "data center 1", "RackSlot": 3, "Room": "room 1",
- "Aisle": "aisle 1", "RackName": "rack 1", "Location": "location 1"}
+ loc_resp = {"DataCenter": PARAM_DATA_CENTER, "RackSlot": 3, "Room": PARAM_ROOM,
+ "Aisle": PARAM_AISLE, "RackName": PARAM_RACK, "Location": PARAM_LOCATION}
with pytest.raises(Exception) as err:
self.module.validate_dictionary(f_module, loc_resp)
assert err.value.args[0] == "Changes found to be applied."
- loc_resp = {"DataCenter": "data center 1", "RackSlot": 2, "Room": "room 1",
- "Aisle": "aisle 1", "RackName": "rack 1", "Location": "location 1"}
+ loc_resp = {"DataCenter": PARAM_DATA_CENTER, "RackSlot": 2, "Room": PARAM_ROOM,
+ "Aisle": PARAM_AISLE, "RackName": PARAM_RACK, "Location": PARAM_LOCATION}
f_module.check_mode = False
with pytest.raises(Exception) as err:
self.module.validate_dictionary(f_module, loc_resp)
assert err.value.args[0] == "No changes found to be applied."
- loc_resp = {"DataCenter": "data center 1", "RackSlot": 3, "Room": "room 1",
- "Aisle": "aisle 1", "RackName": "rack 1", "Location": "location 1"}
+ loc_resp = {"DataCenter": PARAM_DATA_CENTER, "RackSlot": 3, "Room": PARAM_ROOM,
+ "Aisle": PARAM_AISLE, "RackName": PARAM_RACK, "Location": PARAM_LOCATION}
result = self.module.validate_dictionary(f_module, loc_resp)
- assert result == {"DataCenter": "data center 1", "RackSlot": 2,
- "Room": "room 1", "Aisle": "aisle 1", "RackName": "rack 1",
- "Location": "location 1", "SettingType": "Location"}
+ assert result == {"DataCenter": PARAM_DATA_CENTER, "RackSlot": 2,
+ "Room": PARAM_ROOM, "Aisle": PARAM_AISLE, "RackName": PARAM_RACK,
+ "Location": PARAM_LOCATION, "SettingType": "Location"}
def test_device_validation(self, ome_conn_mock_location, ome_default_args, mocker, ome_response_mock):
mocker.patch(MODULE_PATH + "validate_dictionary",
- return_value={"DataCenter": "data center 1", "RackSlot": 2, "Room": "room 1",
- "Aisle": "aisle 1", "RackName": "rack 1", "Location": "location 1",
+ return_value={"DataCenter": PARAM_DATA_CENTER, "RackSlot": 2, "Room": PARAM_ROOM,
+ "Aisle": PARAM_AISLE, "RackName": PARAM_RACK, "Location": PARAM_LOCATION,
"SettingType": "Location"})
- param = {"data_center": "data center 1", "rack_slot": 2, "device_id": 25012,
- "room": "room 1", "aisle": "aisle 1", "rack": "rack 1", "location": "location 1"}
+ param = {"data_center": PARAM_DATA_CENTER, "rack_slot": 2, "device_id": 25012,
+ "room": PARAM_ROOM, "aisle": PARAM_AISLE, "rack": PARAM_RACK, "location": PARAM_LOCATION}
ome_default_args.update(param)
f_module = self.get_module_mock(params=param)
ome_response_mock.status_code = 200
ome_response_mock.success = True
ome_response_mock.json_data = {
- "value": [], "DataCenter": "data center 1",
- "RackSlot": 3, "Room": "room 1", "Aisle": "aisle 1", "RackName": "rack 1",
- "Location": "location 1", "SettingType": "Location", "result": {"RackSlot": 4}}
+ "value": [], "DataCenter": PARAM_DATA_CENTER,
+ "RackSlot": 3, "Room": PARAM_ROOM, "Aisle": PARAM_AISLE, "RackName": PARAM_RACK,
+ "Location": PARAM_LOCATION, "SettingType": "Location", "result": {"RackSlot": 4}}
with pytest.raises(Exception) as err:
self.module.device_validation(f_module, ome_conn_mock_location)
assert err.value.args[0] == "Unable to complete the operation because the entered target " \
"device id '25012' is invalid."
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': "Successfully updated the location settings.",
+ 'mparams': {"hostname": "1.2.3.4",
+ "device_id": 1234, "data_center": "data center",
+ "room": "room", "aisle": "aisle", "rack": "rack"}
+ },
+ {"json_data": {"value": [
+ {'Id': 1234, 'DeviceServiceTag': 'ABCD123',
+ 'PublicAddress': "1.2.3.4", 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': "Successfully updated the location settings.",
+ 'mparams': {"hostname": "1.2.3.4",
+ "device_service_tag": "ABCD123", "data_center": "data center",
+ "room": "room", "aisle": "aisle", "rack": "rack"}
+ },
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': "Successfully updated the location settings.",
+ 'mparams': {"hostname": "1.2.3.4",
+ "data_center": "data center",
+ "room": "room", "aisle": "aisle", "rack": "rack"}
+ },
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "dummyhost_shouldnotexist",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': "Successfully updated the location settings.",
+ 'mparams': {"hostname": "dummyhost_shouldnotexist",
+ "data_center": "data center",
+ "room": "room", "aisle": "aisle", "rack": "rack"}
+ }
+ ])
+ def test_ome_devices_location_success(self, params, ome_conn_mock_location, ome_response_mock,
+ ome_default_args, module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(
+ ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': "The device location settings operation is supported only on OpenManage Enterprise Modular systems.",
+ 'http_error_json': {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "MessageId": "CGEN1006",
+ "RelatedProperties": [],
+ "Message": "Unable to process the request because an error occurred.",
+ "MessageArgs": [],
+ "Severity": "Critical",
+ "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
+ }
+ ]
+ }
+ },
+ 'mparams': {"hostname": "1.2.3.4",
+ "data_center": "data center",
+ "room": "room", "aisle": "aisle", "rack": "rack"}
+ },
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': "Unable to complete the operation because the location settings are not supported on the specified device.",
+ 'http_error_json': {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "MessageId": "CGEN1004",
+ "RelatedProperties": [],
+ "Message": "Unable to process the request because an error occurred.",
+ "MessageArgs": [],
+ "Severity": "Critical",
+ "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
+ }
+ ]
+ }
+ },
+ 'check_domain_service': 'mocked_check_domain_service',
+ 'standalone_chassis': ('Id', 1234),
+ 'mparams': {"hostname": "1.2.3.4",
+ "data_center": "data center",
+ "room": "room", "aisle": "aisle", "rack": "rack"}
+ },
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': "Unable to complete the operation because the entered target device id '123' is invalid.",
+ 'mparams': {"hostname": "1.2.3.4", "device_id": 123,
+ "data_center": "data center",
+ "room": "room", "aisle": "aisle", "rack": "rack"}
+ },
+ ])
+ def test_ome_devices_location_failure(self, params, ome_conn_mock_location, ome_response_mock,
+ ome_default_args, module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ mocks = ["check_domain_service", "standalone_chassis"]
+ for m in mocks:
+ if m in params:
+ mocker.patch(MODULE_PATH + m, return_value=params.get(m, {}))
+ if 'http_error_json' in params:
+ json_str = to_text(json.dumps(params.get('http_error_json', {})))
+ ome_conn_mock_location.invoke_request.side_effect = HTTPError(
+ 'https://testhost.com', 401, 'http error message', {
+ "accept-type": "application/json"},
+ StringIO(json_str))
+ ome_default_args.update(params['mparams'])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == params['message']
+
@pytest.mark.parametrize("exc_type",
[IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
def test_ome_device_location_main_exception_case(self, exc_type, mocker, ome_default_args,
ome_conn_mock_location, ome_response_mock):
- ome_default_args.update({"device_id": 25011, "data_center": "data center 1",
- "room": "room 1", "aisle": "aisle 1", "rack": "rack 1",
- "rack_slot": "2", "location": "location 1"})
+ ome_default_args.update({"device_id": 25011, "data_center": PARAM_DATA_CENTER,
+ "room": PARAM_ROOM, "aisle": PARAM_AISLE, "rack": PARAM_RACK,
+ "rack_slot": "2", "location": PARAM_LOCATION})
ome_response_mock.status_code = 400
ome_response_mock.success = False
json_str = to_text(json.dumps({"info": "error_details"}))
if exc_type == URLError:
- mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("url open error"))
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type("url open error"))
result = self._run_module(ome_default_args)
assert result["unreachable"] is True
elif exc_type not in [HTTPError, SSLValidationError]:
- mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("exception message"))
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type("exception message"))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_domain_service',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
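Editor's note: the success and failure tests above are table-driven; each pytest.mark.parametrize entry bundles the mocked REST response (json_data), the module arguments (mparams), and the expected msg, so a single test body covers many scenarios. Below is a minimal sketch of the same pattern on a hypothetical build_location_payload helper (not part of the collection), runnable with pytest alone.

    import pytest

    def build_location_payload(mparams):
        """Hypothetical stand-in: turn module params into an OME Location payload."""
        if not any(mparams.get(k) for k in ("data_center", "room", "aisle", "rack")):
            raise ValueError("one of the following is required: data_center, room, aisle, rack")
        return {"DataCenter": mparams.get("data_center"), "Room": mparams.get("room"),
                "Aisle": mparams.get("aisle"), "RackName": mparams.get("rack"),
                "SettingType": "Location"}

    @pytest.mark.parametrize("params", [
        {"mparams": {"data_center": "data center", "room": "room", "aisle": "aisle", "rack": "rack"},
         "message": None},   # success case: payload built, no error expected
        {"mparams": {},      # failure case: nothing to configure
         "message": "one of the following is required: data_center, room, aisle, rack"},
    ])
    def test_build_location_payload(params):
        if params["message"]:
            with pytest.raises(ValueError, match=params["message"]):
                build_location_payload(params["mparams"])
        else:
            assert build_location_payload(params["mparams"])["SettingType"] == "Location"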
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py
index 692061430..004586393 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 4.2.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -401,7 +401,7 @@ class TestOmeDeviceMgmtNetwork(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'validate_input',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_network_services.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_network_services.py
index 0a68ac9d4..b3e258ffe 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_network_services.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_network_services.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -13,7 +13,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
-import pdb
import pytest
from ssl import SSLError
@@ -22,8 +21,7 @@ from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_network_services
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_network_services.'
@@ -48,7 +46,7 @@ class TestOMEMDeviceNetworkService(FakeAnsibleModule):
def test_check_domain_service_http(self, ome_conn_mock_network, ome_default_args, mocker):
f_module = self.get_module_mock()
err_message = {'error': {'@Message.ExtendedInfo': [{'MessageId': 'CGEN1006'}]}}
- ome_conn_mock_network.invoke_request.side_effect = HTTPError('http://testhost.com', 400,
+ ome_conn_mock_network.invoke_request.side_effect = HTTPError('https://testhost.com', 400,
json.dumps(err_message),
{"accept-type": "application/json"}, None)
mocker.patch(MODULE_PATH + 'json.loads', return_value=err_message)
@@ -58,19 +56,19 @@ class TestOMEMDeviceNetworkService(FakeAnsibleModule):
"OpenManage Enterprise Modular."
def test_get_chassis_device(self, ome_conn_mock_network, ome_default_args, mocker, ome_response_mock):
- mocker.patch(MODULE_PATH + "get_ip_from_host", return_value="192.18.1.1")
+ mocker.patch(MODULE_PATH + "get_ip_from_host", return_value="X.X.X.X")
ome_response_mock.json_data = {"value": [{"DeviceId": 25011, "DomainRoleTypeValue": "LEAD",
- "PublicAddress": ["192.168.1.1"]},
+ "PublicAddress": ["XX.XX.XX.XX"]},
{"DeviceId": 25012, "DomainRoleTypeValue": "STANDALONE",
- "PublicAddress": ["192.168.1.2"]}]}
- param = {"device_id": 25012, "hostname": "192.168.1.6", "remote_racadm_settings": {"enabled": True}}
+ "PublicAddress": ["YY.YY.YY.YY"]}]}
+ param = {"device_id": 25012, "hostname": "Y.Y.Y.Y", "remote_racadm_settings": {"enabled": True}}
f_module = self.get_module_mock(params=param)
with pytest.raises(Exception) as err:
self.module.get_chassis_device(f_module, ome_conn_mock_network)
assert err.value.args[0] == "Failed to retrieve the device information."
ome_response_mock.json_data = {"value": [{"DeviceId": 25011, "DomainRoleTypeValue": "LEAD",
- "PublicAddress": ["192.18.1.1"]}]}
- param = {"hostname": "192.18.1.1", "remote_racadm_settings": {"enabled": True}}
+ "PublicAddress": ["X.X.X.X"]}]}
+ param = {"hostname": "X.X.X.X", "remote_racadm_settings": {"enabled": True}}
f_module = self.get_module_mock(params=param)
key, value = self.module.get_chassis_device(f_module, ome_conn_mock_network)
assert key == "Id"
@@ -88,7 +86,7 @@ class TestOMEMDeviceNetworkService(FakeAnsibleModule):
"SnmpV1V2Credential": {"CommunityName": "public"}},
"SshConfiguration": {"IdleTimeout": 60, "MaxAuthRetries": 3, "MaxSessions": 1,
"PortNumber": 22, "SshEnabled": False}}
- ome_default_args.update({"device_id": 25012, "hostname": "192.168.1.6", "remote_racadm_settings": {"enabled": True},
+ ome_default_args.update({"device_id": 25012, "hostname": "Y.Y.Y.Y", "remote_racadm_settings": {"enabled": True},
"snmp_settings": {"enabled": True, "port_number": 161, "community_name": "public"},
"ssh_settings": {"enabled": True, "port_number": 22, "max_sessions": 1,
"max_auth_retries": 3, "idle_timeout": 60}})
@@ -96,7 +94,7 @@ class TestOMEMDeviceNetworkService(FakeAnsibleModule):
assert resp['msg'] == "Successfully updated the network services settings."
def test_fetch_device_details(self, ome_conn_mock_network, ome_default_args, ome_response_mock, mocker):
- param = {"device_id": 25012, "hostname": "192.168.1.6", "remote_racadm_settings": {"enabled": True}}
+ param = {"device_id": 25012, "hostname": "Y.Y.Y.Y", "remote_racadm_settings": {"enabled": True}}
f_module = self.get_module_mock(params=param)
ome_response_mock.status_code = 200
ome_response_mock.success = True
@@ -115,18 +113,18 @@ class TestOMEMDeviceNetworkService(FakeAnsibleModule):
"EnableRemoteRacadm": True, "SnmpConfiguration": {}, "SshConfiguration": {}}
resp = self.module.fetch_device_details(f_module, ome_conn_mock_network)
assert resp.json_data["SnmpConfiguration"] == {}
- param = {"hostname": "192.168.1.6", "remote_racadm_settings": {"enabled": True}}
+ param = {"hostname": "Y.Y.Y.Y", "remote_racadm_settings": {"enabled": True}}
f_module = self.get_module_mock(params=param)
mocker.patch(MODULE_PATH + "get_chassis_device", return_value=("Id", "25012"))
resp = self.module.fetch_device_details(f_module, ome_conn_mock_network)
assert resp.json_data["SnmpConfiguration"] == {}
def test_get_ip_from_host(self, ome_conn_mock_network, ome_default_args, ome_response_mock):
- result = self.module.get_ip_from_host("192.168.0.1")
- assert result == "192.168.0.1"
+ result = self.module.get_ip_from_host("ZZ.ZZ.ZZ.ZZ")
+ assert result == "ZZ.ZZ.ZZ.ZZ"
def test_check_mode_validation(self, ome_conn_mock_network, ome_default_args, ome_response_mock):
- param = {"device_id": 25012, "hostname": "192.168.1.6", "remote_racadm_settings": {"enabled": True},
+ param = {"device_id": 25012, "hostname": "Y.Y.Y.Y", "remote_racadm_settings": {"enabled": True},
"snmp_settings": {"enabled": True, "port_number": 161, "community_name": "public"},
"ssh_settings": {"enabled": True, "port_number": 22, "max_sessions": 1,
"max_auth_retries": 3, "idle_timeout": 120}}
@@ -152,7 +150,7 @@ class TestOMEMDeviceNetworkService(FakeAnsibleModule):
with pytest.raises(Exception) as err:
self.module.check_mode_validation(f_module, loc_data, ome_conn_mock_network)
assert err.value.args[0] == "Changes found to be applied."
- param = {"device_id": 25012, "hostname": "192.168.1.6", "remote_racadm_settings": {"enabled": False},
+ param = {"device_id": 25012, "hostname": "Y.Y.Y.Y", "remote_racadm_settings": {"enabled": False},
"snmp_settings": {"enabled": False, "port_number": 161, "community_name": "public"},
"ssh_settings": {"enabled": False, "port_number": 22, "max_sessions": 1,
"max_auth_retries": 3, "idle_timeout": 60}}
@@ -178,7 +176,7 @@ class TestOMEMDeviceNetworkService(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_domain_service',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
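Editor's note: test_check_mode_validation above exercises the usual OME idempotency contract, where check mode reports "Changes found to be applied." when the requested settings differ from the device state and "No changes found to be applied." when they already match. A minimal sketch of that contract follows, under the assumption that exiting is modelled by raising; the diff_settings helper is hypothetical.

    CHANGES_FOUND = "Changes found to be applied."
    NO_CHANGES_FOUND = "No changes found to be applied."

    def diff_settings(check_mode, current, desired):
        """Return the keys that need updating; raise to mimic the module's exit messages."""
        delta = {k: v for k, v in desired.items() if current.get(k) != v}
        if check_mode and delta:
            raise SystemExit(CHANGES_FOUND)
        if not delta:
            raise SystemExit(NO_CHANGES_FOUND)
        return delta

    current = {"EnableRemoteRacadm": True, "SshConfiguration": {"PortNumber": 22}}
    print(diff_settings(False, current, {"EnableRemoteRacadm": False}))  # {'EnableRemoteRacadm': False}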
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py
index 928c407c3..553a57369 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -20,11 +20,22 @@ from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_power_settings
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
-from mock import MagicMock, patch, Mock
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_power_settings.'
+DEVICE_FAIL_MSG = "Unable to complete the operation because the entered target device {0} '{1}' is invalid."
+CONFIG_FAIL_MSG = "one of the following is required: power_configuration, " \
+ "redundancy_configuration, hot_spare_configuration"
+CHANGES_FOUND = "Changes found to be applied."
+NO_CHANGES_FOUND = "No changes found to be applied."
+SUCCESS_MSG = "Successfully updated the power settings."
+FETCH_FAIL_MSG = "Failed to fetch the device information."
+POWER_FAIL_MSG = "Unable to complete the operation because the power settings " \
+ "are not supported on the specified device."
+DOMAIN_FAIL_MSG = "The device location settings operation is supported only on " \
+ "OpenManage Enterprise Modular."
+
@pytest.fixture
def ome_conn_mock_power(mocker, ome_response_mock):
@@ -38,18 +49,212 @@ class TestOMEMDevicePower(FakeAnsibleModule):
module = ome_device_power_settings
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ 'DeviceServiceTag': 'ABCD123', "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}],
+ "EnableHotSpare": True,
+ "EnablePowerCapSettings": True,
+ "MaxPowerCap": "3424",
+ "MinPowerCap": "3291",
+ "PowerCap": "3425",
+ "PrimaryGrid": "GRID_1",
+ "RedundancyPolicy": "NO_REDUNDANCY",
+ "SettingType": "Power"},
+ 'message': SUCCESS_MSG,
+ 'mparams': {"hostname": "1.2.3.4",
+ "power_configuration": {"enable_power_cap": True, "power_cap": 3424},
+ "hot_spare_configuration": {"enable_hot_spare": False, "primary_grid": "GRID_1"},
+ "device_id": 1234,
+ }},
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ 'DeviceServiceTag': 'ABCD123', "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}],
+ "EnableHotSpare": True,
+ "EnablePowerCapSettings": True,
+ "MaxPowerCap": "3424",
+ "MinPowerCap": "3291",
+ "PowerCap": "3425",
+ "PrimaryGrid": "GRID_1",
+ "RedundancyPolicy": "NO_REDUNDANCY",
+ "SettingType": "Power"},
+ 'message': SUCCESS_MSG,
+ 'mparams': {"hostname": "1.2.3.4",
+ "power_configuration": {"enable_power_cap": False, "power_cap": 3424},
+ "hot_spare_configuration": {"enable_hot_spare": True, "primary_grid": "GRID_1"},
+ "device_service_tag": 'ABCD123',
+ }},
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}],
+ "EnableHotSpare": True,
+ "EnablePowerCapSettings": True,
+ "MaxPowerCap": "3424",
+ "MinPowerCap": "3291",
+ "PowerCap": "3425",
+ "PrimaryGrid": "GRID_1",
+ "RedundancyPolicy": "NO_REDUNDANCY",
+ "SettingType": "Power"},
+ 'message': SUCCESS_MSG,
+ 'mparams': {"hostname": "1.2.3.4",
+ "power_configuration": {"enable_power_cap": False, "power_cap": 3424},
+ "hot_spare_configuration": {"enable_hot_spare": True, "primary_grid": "GRID_1"}
+ }},
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "dummyhostname_shouldnotexist",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}],
+ "EnableHotSpare": True,
+ "EnablePowerCapSettings": True,
+ "MaxPowerCap": "3424",
+ "MinPowerCap": "3291",
+ "PowerCap": "3425",
+ "PrimaryGrid": "GRID_1",
+ "RedundancyPolicy": "NO_REDUNDANCY",
+ "SettingType": "Power"},
+ 'message': SUCCESS_MSG,
+ 'mparams': {"hostname": "dummyhostname_shouldnotexist",
+ "power_configuration": {"enable_power_cap": False, "power_cap": 3424},
+ "hot_spare_configuration": {"enable_hot_spare": True, "primary_grid": "GRID_1"}
+ }}
+ ])
+ def test_ome_devices_power_settings_success(self, params, ome_conn_mock_power, ome_response_mock,
+ ome_default_args, module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(
+ ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': DOMAIN_FAIL_MSG,
+ 'http_error_json': {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "MessageId": "CGEN1006",
+ "RelatedProperties": [],
+ "Message": "Unable to process the request because an error occurred.",
+ "MessageArgs": [],
+ "Severity": "Critical",
+ "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
+ }
+ ]
+ }},
+ 'mparams': {"hostname": "1.2.3.4",
+ "device_service_tag": 'ABCD123',
+ "power_configuration": {"enable_power_cap": True, "power_cap": 3424}
+ }},
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': POWER_FAIL_MSG,
+ 'check_domain_service': 'mocked_check_domain_service',
+ 'get_chassis_device': ('Id', 1234),
+ 'http_error_json': {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "MessageId": "CGEN1004",
+ "RelatedProperties": [],
+ "Message": "Unable to process the request because an error occurred.",
+ "MessageArgs": [],
+ "Severity": "Critical",
+ "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
+ }
+ ]
+ }},
+ 'mparams': {"hostname": "1.2.3.4",
+ "power_configuration": {"enable_power_cap": True, "power_cap": 3424}
+ }},
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': POWER_FAIL_MSG,
+ 'check_domain_service': 'mocked_check_domain_service',
+ 'get_chassis_device': ('Id', 1234),
+ 'http_err_code': 404,
+ 'http_error_json': {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "MessageId": "CGEN1004",
+ "RelatedProperties": [],
+ "Message": "Unable to process the request because an error occurred.",
+ "MessageArgs": [],
+ "Severity": "Critical",
+ "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
+ }
+ ]
+ }},
+ 'mparams': {"hostname": "1.2.3.4",
+ "power_configuration": {"enable_power_cap": True, "power_cap": 3424}
+ }},
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': DEVICE_FAIL_MSG.format('id', 123),
+ 'check_domain_service': 'mocked_check_domain_service',
+ 'get_chassis_device': ('Id', 1234),
+ 'mparams': {"hostname": "1.2.3.4", 'device_id': 123,
+ "power_configuration": {"enable_power_cap": True, "power_cap": 3424}
+ }},
+ {"json_data": {"value": [
+ {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'message': CONFIG_FAIL_MSG,
+ 'mparams': {"hostname": "1.2.3.4", "device_id": 123}}
+ ])
+ def test_ome_devices_power_settings_failure(self, params, ome_conn_mock_power, ome_response_mock,
+ ome_default_args, module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ mocks = ["check_domain_service", 'get_chassis_device']
+ for m in mocks:
+ if m in params:
+ mocker.patch(MODULE_PATH + m, return_value=params.get(m, {}))
+ if 'http_error_json' in params:
+ json_str = to_text(json.dumps(params.get('http_error_json', {})))
+ ome_conn_mock_power.invoke_request.side_effect = HTTPError(
+ 'https://testhost.com', params.get('http_err_code', 401), 'http error message', {
+ "accept-type": "application/json"},
+ StringIO(json_str))
+ ome_default_args.update(params['mparams'])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == params['message']
+
def test_check_domain_service(self, ome_conn_mock_power, ome_default_args):
f_module = self.get_module_mock()
- result = self.module.check_domain_service(f_module, ome_conn_mock_power)
+ result = self.module.check_domain_service(
+ f_module, ome_conn_mock_power)
assert result is None
def test_get_chassis_device(self, ome_conn_mock_power, ome_default_args, mocker, ome_response_mock):
- mocker.patch(MODULE_PATH + "get_ip_from_host", return_value="192.18.1.1")
+ mocker.patch(MODULE_PATH + "get_ip_from_host",
+ return_value="X.X.X.X")
ome_response_mock.json_data = {"value": [{"DeviceId": 25011, "DomainRoleTypeValue": "LEAD",
- "PublicAddress": ["192.168.1.1"]},
+ "PublicAddress": ["XX.XX.XX.XX"]},
{"DeviceId": 25012, "DomainRoleTypeValue": "STANDALONE",
- "PublicAddress": ["192.168.1.2"]}]}
- param = {"device_id": 25012, "hostname": "192.168.1.6",
+ "PublicAddress": ["YY.YY.YY.YY"]}]}
+ param = {"device_id": 25012, "hostname": "Y.Y.Y.Y",
"power_configuration": {"enable_power_cap": True, "power_cap": 3424}}
f_module = self.get_module_mock(params=param)
with pytest.raises(Exception) as err:
@@ -60,7 +265,8 @@ class TestOMEMDevicePower(FakeAnsibleModule):
loc_data = {"PowerCap": "3424", "MinPowerCap": "3291", "MaxPowerCap": "3424",
"RedundancyPolicy": "NO_REDUNDANCY", "EnablePowerCapSettings": True,
"EnableHotSpare": True, "PrimaryGrid": "GRID_1", "PowerBudgetOverride": False}
- param = {"power_configuration": {"enable_power_cap": True, "power_cap": 3424}}
+ param = {"power_configuration": {
+ "enable_power_cap": True, "power_cap": 3424}}
f_module = self.get_module_mock(params=param)
with pytest.raises(Exception) as err:
self.module.check_mode_validation(f_module, loc_data)
@@ -70,7 +276,8 @@ class TestOMEMDevicePower(FakeAnsibleModule):
with pytest.raises(Exception) as err:
self.module.check_mode_validation(f_module, loc_data)
assert err.value.args[0] == "Changes found to be applied."
- param = {"redundancy_configuration": {"redundancy_policy": "NO_REDUNDANCY"}}
+ param = {"redundancy_configuration": {
+ "redundancy_policy": "NO_REDUNDANCY"}}
f_module = self.get_module_mock(params=param)
f_module.check_mode = True
with pytest.raises(Exception) as err:
@@ -78,7 +285,7 @@ class TestOMEMDevicePower(FakeAnsibleModule):
assert err.value.args[0] == "No changes found to be applied."
def test_fetch_device_details(self, ome_conn_mock_power, ome_default_args, ome_response_mock):
- param = {"device_id": 25012, "hostname": "192.168.1.6",
+ param = {"device_id": 25012, "hostname": "Y.Y.Y.Y",
"power_configuration": {"enable_power_cap": True, "power_cap": 3424}}
f_module = self.get_module_mock(params=param)
ome_response_mock.status_code = 200
@@ -93,8 +300,8 @@ class TestOMEMDevicePower(FakeAnsibleModule):
"device id '25012' is invalid."
def test_get_ip_from_host(self, ome_conn_mock_power, ome_default_args, ome_response_mock):
- result = self.module.get_ip_from_host("192.168.0.1")
- assert result == "192.168.0.1"
+ result = self.module.get_ip_from_host("ZZ.ZZ.ZZ.ZZ")
+ assert result == "ZZ.ZZ.ZZ.ZZ"
@pytest.mark.parametrize("exc_type",
[IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
@@ -106,16 +313,18 @@ class TestOMEMDevicePower(FakeAnsibleModule):
ome_response_mock.success = False
json_str = to_text(json.dumps({"info": "error_details"}))
if exc_type == URLError:
- mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("url open error"))
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type("url open error"))
result = self._run_module(ome_default_args)
assert result["unreachable"] is True
elif exc_type not in [HTTPError, SSLValidationError]:
- mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("exception message"))
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type("exception message"))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_domain_service',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
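Editor's note: the main-exception tests above all follow one matrix: URLError is reported as unreachable, HTTPError (and SSLValidationError) fails with the parsed error body, and every other exception fails with its message. A small sketch of that dispatch is shown below, using only standard-library exception types to stay self-contained; the classify_exception helper is hypothetical.

    import json
    from urllib.error import HTTPError, URLError

    def classify_exception(exc):
        """Mirror the result keys the tests assert on for each exception type."""
        if isinstance(exc, HTTPError):
            # Assumes the HTTPError carries a readable JSON body, as in these tests.
            try:
                info = json.loads(exc.read())
            except ValueError:
                info = {}
            return {"failed": True, "msg": str(exc), "error_info": info}
        if isinstance(exc, URLError):
            return {"unreachable": True, "msg": str(exc.reason)}
        # SSLError, ConnectionError, TypeError, ValueError, IOError, etc.
        return {"failed": True, "msg": str(exc)}

    print(classify_exception(URLError("url open error")))  # {'unreachable': True, 'msg': 'url open error'}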
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py
index 97b611cee..60b8c17cc 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.0.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.3.0
+# Copyright (C) 2022-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -20,9 +20,12 @@ from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_device_quick_deploy
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_device_quick_deploy.'
+ACCESS_TYPE = "application/json"
+HTTP_ADDRESS = 'https://testhost.com'
+HTTP_ERROR_MSG = 'http error message'
@pytest.fixture
@@ -42,51 +45,72 @@ class TestOMEMDevicePower(FakeAnsibleModule):
result = self.module.check_domain_service(f_module, ome_conn_mock_qd)
assert result is None
+ @pytest.mark.parametrize("exc_type", [HTTPError])
+ def test_check_domain_service_http(self, exc_type, ome_conn_mock_qd, ome_default_args):
+ f_module = self.get_module_mock()
+ json_str = to_text(json.dumps({"error": {"@Message.ExtendedInfo": [{"MessageId": "CGEN1006"}]}}))
+ if exc_type == HTTPError:
+ ome_conn_mock_qd.invoke_request.side_effect = exc_type(
+ HTTP_ADDRESS, 400, HTTP_ERROR_MSG, {"accept-type": ACCESS_TYPE},
+ StringIO(json_str)
+ )
+ with pytest.raises(Exception) as err:
+ self.module.check_domain_service(f_module, ome_conn_mock_qd)
+ assert err.value.args[0] == "The operation to configure the Quick Deploy settings is supported only " \
+ "on OpenManage Enterprise Modular."
+
def test_get_chassis_device(self, ome_conn_mock_qd, ome_default_args, mocker, ome_response_mock):
- mocker.patch(MODULE_PATH + "get_ip_from_host", return_value="192.18.1.1")
+ mocker.patch(MODULE_PATH + "get_ip_from_host", return_value="X.X.X.X")
ome_response_mock.json_data = {"value": [{"DeviceId": 25011, "DomainRoleTypeValue": "LEAD",
- "PublicAddress": ["192.168.1.1"]},
+ "PublicAddress": ["ZZ.ZZ.ZZ.ZZ"]},
{"DeviceId": 25012, "DomainRoleTypeValue": "STANDALONE",
- "PublicAddress": ["192.168.1.2"]}]}
- param = {"device_id": 25012, "hostname": "192.168.1.6"}
+ "PublicAddress": ["ZX.ZX.ZX.ZX"]}]}
+ param = {"device_id": 25012, "hostname": "XY.XY.XY.XY"}
f_module = self.get_module_mock(params=param)
with pytest.raises(Exception) as err:
self.module.get_chassis_device(f_module, ome_conn_mock_qd)
assert err.value.args[0] == "Unable to retrieve the device information."
+ ome_response_mock.json_data = {"value": [{"DeviceId": 25011, "DomainRoleTypeValue": "LEAD",
+ "PublicAddress": ["ZZ.ZZ.ZZ.ZZ"]},
+ {"DeviceId": 25012, "DomainRoleTypeValue": "STANDALONE",
+ "PublicAddress": ["X.X.X.X"]}]}
+ result = self.module.get_chassis_device(f_module, ome_conn_mock_qd)
+ assert result[0] == "Id"
+ assert result[1] == 25012
def test_get_ip_from_host(self, ome_conn_mock_qd, ome_default_args, ome_response_mock):
- result = self.module.get_ip_from_host("192.168.0.1")
- assert result == "192.168.0.1"
+ result = self.module.get_ip_from_host("XX.XX.XX.XX")
+ assert result == "XX.XX.XX.XX"
def test_validate_ip_address(self, ome_conn_mock_qd, ome_response_mock, ome_default_args):
result = self.module.validate_ip_address("192.168.0.1", "IPV4")
assert result is True
- result = self.module.validate_ip_address("192.168.0.1.1", "IPV4")
+ result = self.module.validate_ip_address("XX.XX.XX.XX.1", "IPV4")
assert result is False
result = self.module.validate_ip_address("::", "IPV6")
assert result is True
def test_ip_address_field(self, ome_conn_mock_qd, ome_response_mock, ome_default_args, mocker):
param = {"device_id": 25011, "setting_type": "ServerQuickDeploy",
- "quick_deploy_options": {"ipv4_enabled": False, "ipv4_subnet_mask": "192.168.0.1",
+ "quick_deploy_options": {"ipv4_enabled": False, "ipv4_subnet_mask": "XX.XX.XX.XX",
"ipv4_gateway": "0.0.0.0.0"}, "slots": [{"vlan_id": 1}]}
fields = [("ipv4_subnet_mask", "IPV4"), ("ipv4_gateway", "IPV4"), ("ipv6_gateway", "IPV6")]
f_module = self.get_module_mock(params=param)
mocker.patch(MODULE_PATH + "validate_ip_address", return_value=False)
with pytest.raises(Exception) as err:
self.module.ip_address_field(f_module, fields, param["quick_deploy_options"], slot=False)
- assert err.value.args[0] == "Invalid '192.168.0.1' address provided for the ipv4_subnet_mask."
+ assert err.value.args[0] == "Invalid 'XX.XX.XX.XX' address provided for the ipv4_subnet_mask."
def test_get_device_details(self, ome_conn_mock_qd, ome_response_mock, ome_default_args, mocker):
- param = {"device_id": 25012, "hostname": "192.168.1.6", "setting_type": "ServerQuickDeploy",
- "quick_deploy_options": {"ipv4_enabled": False, "ipv4_subnet_mask": "192.168.0.1",
+ param = {"device_id": 25012, "hostname": "XY.XY.XY.XY", "setting_type": "ServerQuickDeploy",
+ "quick_deploy_options": {"ipv4_enabled": False, "ipv4_subnet_mask": "XX.XX.XX.XX",
"ipv4_gateway": "0.0.0.0"}, "slots": [{"vlan_id": 1}]}
f_module = self.get_module_mock(params=param)
ome_response_mock.status_code = 200
ome_response_mock.success = True
ome_response_mock.json_data = {"value": [], "SettingType": "ServerQuickDeploy",
"ProtocolTypeV4": "true", "NetworkTypeV4": "Static",
- "IpV4Gateway": "192.168.0.1", "IpV4SubnetMask": "255.255.255.0"}
+ "IpV4Gateway": "XX.XX.XX.XX", "IpV4SubnetMask": "XXX.XXX.XXX.XXX"}
mocker.patch(MODULE_PATH + 'get_chassis_device', return_value=("Id", 25011))
mocker.patch(MODULE_PATH + "check_mode_validation", return_value=({}, {}))
mocker.patch(MODULE_PATH + "job_payload_submission", return_value=12345)
@@ -99,38 +123,77 @@ class TestOMEMDevicePower(FakeAnsibleModule):
f_module = self.get_module_mock(params=param)
result = self.module.get_device_details(ome_conn_mock_qd, f_module)
assert result == (12345, None)
- param.update({"job_wait": True})
+ param.update({"job_wait": True, "job_wait_timeout": 60})
+ ome_conn_mock_qd.job_tracking.return_value = (True, "error message")
+ with pytest.raises(Exception) as err:
+ self.module.get_device_details(ome_conn_mock_qd, f_module)
+ assert err.value.args[0] == "Unable to deploy the Quick Deploy settings."
+ ome_conn_mock_qd.job_tracking.return_value = (False, "error message")
+ result = self.module.get_device_details(ome_conn_mock_qd, f_module)
+ assert result[0] == 12345
+
+ @pytest.mark.parametrize("exc_type", [HTTPError])
+ def test_get_device_details_http(self, exc_type, ome_conn_mock_qd, ome_response_mock, ome_default_args, mocker):
+ param = {"hostname": "XY.XY.XY.XY", "setting_type": "ServerQuickDeploy",
+ "quick_deploy_options": {"ipv4_enabled": False, "ipv4_subnet_mask": "XX.XX.XX.XX",
+ "ipv4_gateway": "0.0.0.0"}, "slots": [{"vlan_id": 1}]}
+ mocker.patch(MODULE_PATH + 'get_chassis_device', return_value=("Id", 25011))
+ json_str = to_text(json.dumps({"error": {"@Message.ExtendedInfo": [{"MessageId": "CGEN1004"}]}}))
+ if exc_type == HTTPError:
+ ome_conn_mock_qd.invoke_request.side_effect = exc_type(
+ HTTP_ADDRESS, 400, HTTP_ERROR_MSG, {"accept-type": ACCESS_TYPE},
+ StringIO(json_str)
+ )
+ f_module = self.get_module_mock(params=param)
+ with pytest.raises(Exception) as err:
+ self.module.get_device_details(ome_conn_mock_qd, f_module)
+ assert err.value.args[0] == "Unable to complete the operation because the Server Quick Deploy configuration " \
+ "settings are not supported on the specified device."
def test_job_payload_submission(self, ome_conn_mock_qd, ome_response_mock, ome_default_args):
ome_response_mock.status_code = 200
ome_response_mock.success = True
ome_response_mock.json_data = {"Id": 12345}
ome_conn_mock_qd.job_submission.return_value = ome_response_mock
- payload = {"ProtocolTypeV4": True, "NetworkTypeV4": "Static", "IpV4SubnetMask": "255.255.255.0",
+ payload = {"ProtocolTypeV4": True, "NetworkTypeV4": "Static", "IpV4SubnetMask": "XXX.XXX.XXX.XXX",
"IpV4Gateway": "0.0.0.0", "ProtocolTypeV6": True, "NetworkTypeV6": "Static",
"PrefixLength": "1", "IpV6Gateway": "0.0.0.0"}
- slot_payload = [{"SlotId": 1, "IPV4Address": "192.168.0.2", "IPV6Address": "::", "VlanId": 1}]
+ slot_payload = [{"SlotId": 1, "IPV4Address": "YY.YY.YY.YY", "IPV6Address": "::", "VlanId": 1}]
+ resp_data = {"Slots": [
+ {"SlotId": 1, "IPV4Address": "YY.YY.YY.YY", "IPV6Address": "::", "VlanId": 1, "SlotSelected": False},
+ {"SlotId": 2, "IPV4Address": "YY.YY.YY.YY", "IPV6Address": "::", "VlanId": 1, "SlotSelected": False},
+ ]}
+ result = self.module.job_payload_submission(ome_conn_mock_qd, payload, slot_payload,
+ "ServerQuickDeploy", 25012, resp_data)
+ assert result == 12345
+
+ payload = {"ProtocolTypeV4": True, "NetworkTypeV4": "Static", "IpV4SubnetMask": "XXX.XXX.XXX.XXX",
+ "IpV4Gateway": "0.0.0.0", "ProtocolTypeV6": True, "NetworkTypeV6": "Static",
+ "PrefixLength": "1", "IpV6Gateway": "0.0.0.0", "rootCredential": "secret"}
+ slot_payload = [{"SlotId": 1, "IPV4Address": "YY.YY.YY.YY", "IPV6Address": "::", "VlanId": 1}]
resp_data = {"Slots": [
- {"SlotId": 1, "IPV4Address": "192.168.0.2", "IPV6Address": "::", "VlanId": 1, "SlotSelected": False},
- {"SlotId": 1, "IPV4Address": "192.168.0.2", "IPV6Address": "::", "VlanId": 1, "SlotSelected": False},
+ {"SlotId": 1, "SlotIPV4Address": "YY.YY.YY.YY", "IPV4Address": "YY.YY.YY.YY", "IPV6Address": "::",
+ "VlanId": 1, "SlotSelected": False, "SlotIPV6Address": "::"},
+ {"SlotId": 2, "IPV4Address": "YY.YY.YY.YY", "IPV6Address": "::", "VlanId": 1, "SlotSelected": False,
+ "SlotIPV4Address": "YY.YY.YY.YY", "SlotIPV6Address": "::"},
]}
result = self.module.job_payload_submission(ome_conn_mock_qd, payload, slot_payload,
"ServerQuickDeploy", 25012, resp_data)
assert result == 12345

def test_check_mode_validation(self, ome_conn_mock_qd, ome_response_mock, ome_default_args):
- param = {"device_id": 25012, "hostname": "192.168.1.6", "setting_type": "ServerQuickDeploy",
+ param = {"device_id": 25012, "hostname": "XY.XY.XY.XY", "setting_type": "ServerQuickDeploy",
"quick_deploy_options": {
- "ipv4_enabled": True, "ipv4_network_type": "Static", "ipv4_subnet_mask": "255.255.255.0",
+ "ipv4_enabled": True, "ipv4_network_type": "Static", "ipv4_subnet_mask": "XXX.XXX.XXX.XXX",
"ipv4_gateway": "0.0.0.0", "ipv6_enabled": True, "ipv6_network_type": "Static",
"ipv6_prefix_length": "1", "ipv6_gateway": "0.0.0.0",
- "slots": [{"slot_id": 1, "slot_ipv4_address": "192.168.0.1",
+ "slots": [{"slot_id": 1, "slot_ipv4_address": "XX.XX.XX.XX",
"slot_ipv6_address": "::", "vlan_id": "1"}]}}
f_module = self.get_module_mock(params=param)
- deploy_data = {"ProtocolTypeV4": True, "NetworkTypeV4": "Static", "IpV4SubnetMask": "255.255.255.0",
+ deploy_data = {"ProtocolTypeV4": True, "NetworkTypeV4": "Static", "IpV4SubnetMask": "XXX.XXX.XXX.XXX",
"IpV4Gateway": "0.0.0.0", "ProtocolTypeV6": True, "NetworkTypeV6": "Static",
"PrefixLength": "1", "IpV6Gateway": "0.0.0.0",
- "Slots": [{"SlotId": 1, "SlotIPV4Address": "192.168.0.1", "SlotIPV6Address": "::", "VlanId": "1"}]}
+ "Slots": [{"SlotId": 1, "SlotIPV4Address": "XX.XX.XX.XX", "SlotIPV6Address": "::", "VlanId": "1"}]}
with pytest.raises(Exception) as err:
self.module.check_mode_validation(f_module, deploy_data)
assert err.value.args[0] == "No changes found to be applied."
@@ -145,6 +208,48 @@ class TestOMEMDevicePower(FakeAnsibleModule):
f_module.check_mode = False
result = self.module.check_mode_validation(f_module, deploy_data)
assert result[0]["NetworkTypeV4"] == "Static"
+ param["quick_deploy_options"].update({"password": "secret", "ipv4_enabled": False, "ipv6_enabled": False,
+ "ProtocolTypeV4": False, "ProtocolTypeV6": False})
+ deploy_data = {"ProtocolTypeV4": False, "NetworkTypeV4": "Static", "IpV4SubnetMask": "XXX.XXX.XXX.XXX",
+ "IpV4Gateway": "0.0.0.0", "ProtocolTypeV6": False, "NetworkTypeV6": "Static",
+ "PrefixLength": "1", "IpV6Gateway": "0.0.0.0",
+ "Slots": [{"SlotId": 1, "SlotIPV4Address": "XX.XX.XX.XX", "SlotIPV6Address": "::",
+ "VlanId": "1"}]}
+ f_module = self.get_module_mock(params=param)
+ result = self.module.check_mode_validation(f_module, deploy_data)
+ assert result[0]["NetworkTypeV4"] == "Static"
+ param = {"device_id": 25012, "hostname": "XY.XY.XY.XY", "setting_type": "ServerQuickDeploy",
+ "quick_deploy_options": {
+ "ipv4_enabled": True, "ipv4_network_type": "Static", "ipv4_subnet_mask": "XXX.XXX.XXX.XXX",
+ "ipv4_gateway": "0.0.0.0", "ipv6_enabled": True, "ipv6_network_type": "Static",
+ "ipv6_prefix_length": "1", "ipv6_gateway": "0.0.0.0",
+ "slots": [{"slot_id": 1, "slot_ipv4_address": "XX.XX.XX.XX",
+ "slot_ipv6_address": "::", "vlan_id": "1"}]}}
+ f_module = self.get_module_mock(params=param)
+ deploy_data = {"ProtocolTypeV4": True, "NetworkTypeV4": "Static", "IpV4SubnetMask": "XXX.XXX.XXX.XXX",
+ "IpV4Gateway": "0.0.0.0", "ProtocolTypeV6": True, "NetworkTypeV6": "Static",
+ "PrefixLength": "1", "IpV6Gateway": "0.0.0.0",
+ "Slots": [{"SlotId": 2, "SlotIPV4Address": "XX.XX.XX.XX", "SlotIPV6Address": "::",
+ "VlanId": "1"}]}
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, deploy_data)
+ assert err.value.args[0] == "Unable to complete the operation because the entered slot(s) '1' does not exist."
+ param = {"device_id": 25012, "hostname": "XY.XY.XY.XY", "setting_type": "ServerQuickDeploy",
+ "quick_deploy_options": {
+ "ipv4_enabled": True, "ipv4_network_type": "Static", "ipv4_subnet_mask": "XXX.XXX.XXX.XXX",
+ "ipv4_gateway": "0.0.0.0", "ipv6_enabled": True, "ipv6_network_type": "Static",
+ "ipv6_prefix_length": "1", "ipv6_gateway": "0.0.0.0",
+ "slots": [{"slot_id": 5, "slot_ipv4_address": "XX.XX.XX.XX",
+ "slot_ipv6_address": "::", "vlan_id": ""}]}}
+ f_module = self.get_module_mock(params=param)
+ deploy_data = {"ProtocolTypeV4": True, "NetworkTypeV4": "Static", "IpV4SubnetMask": "XXX.XXX.XXX.XXX",
+ "IpV4Gateway": "0.0.0.0", "ProtocolTypeV6": True, "NetworkTypeV6": "Static",
+ "PrefixLength": "1", "IpV6Gateway": "0.0.0.0",
+ "Slots": [{"SlotId": 5, "SlotIPV4Address": "XX.XX.XX.XX",
+ "SlotIPV6Address": "::", "VlanId": ""}]}
+ with pytest.raises(Exception) as err:
+ self.module.check_mode_validation(f_module, deploy_data)
+ assert err.value.args[0] == "No changes found to be applied."
@pytest.mark.parametrize("exc_type",
[IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
@@ -166,8 +271,22 @@ class TestOMEMDevicePower(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_domain_service',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
- {"accept-type": "application/json"}, StringIO(json_str)))
+ side_effect=exc_type(HTTP_ADDRESS, 400, HTTP_ERROR_MSG,
+ {"accept-type": ACCESS_TYPE}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
assert 'msg' in result
+
+ def test_main(self, mocker, ome_default_args, ome_conn_mock_qd, ome_response_mock):
+ mocker.patch(MODULE_PATH + 'check_domain_service', return_value=None)
+ mocker.patch(MODULE_PATH + 'ip_address_field', return_value=None)
+ mocker.patch(MODULE_PATH + 'get_device_details', return_value=("JID_123456789", {"Status": "Success"}))
+ ome_default_args.update({"device_id": 25011, "setting_type": "ServerQuickDeploy", "validate_certs": False,
+ "quick_deploy_options": {"ipv4_enabled": False,
+ "slots": [{"slot_id": 1, "vlan_id": 1}]}})
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == "Successfully deployed the Quick Deploy settings."
+ assert result["job_id"] == "JID_123456789"
+ mocker.patch(MODULE_PATH + 'get_device_details', return_value=("JID_135792468", None))
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == "Successfully submitted the Quick Deploy job settings."
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py
index 94e76df11..23148d390 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py
@@ -3,7 +3,7 @@
#
# Dell OpenManage Ansible Modules
# Version 6.1.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -460,7 +460,7 @@ class TestOmeDevices(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'get_dev_ids',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_diagnostics.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_diagnostics.py
index 79c94b5cb..ca6bfd7f9 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_diagnostics.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_diagnostics.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.3.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -20,7 +20,7 @@ from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_diagnostics
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_diagnostics.'
@@ -83,7 +83,7 @@ class TestOMEDiagnostics(FakeAnsibleModule):
"are not applicable for export log."
def test_extract_log_operation(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
- f_module = self.get_module_mock(params={"log_type": "application", "share_address": "192.168.0.1",
+ f_module = self.get_module_mock(params={"log_type": "application", "share_address": "XX.XX.XX.XX",
"share_type": "NFS", "share_name": "iso", "share_user": "username",
"share_password": "password", "share_domain": "domain",
"mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"],
@@ -100,7 +100,7 @@ class TestOMEDiagnostics(FakeAnsibleModule):
result = self.module.extract_log_operation(f_module, ome_conn_mock_diagnostics)
assert result["Id"] == 16011
- f_module = self.get_module_mock(params={"log_type": "support_assist_collection", "share_address": "192.168.0.1",
+ f_module = self.get_module_mock(params={"log_type": "support_assist_collection", "share_address": "XX.XX.XX.XX",
"share_type": "NFS", "share_name": "iso", "share_user": "username",
"share_password": "password", "share_domain": "domain",
"mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"]})
@@ -108,7 +108,7 @@ class TestOMEDiagnostics(FakeAnsibleModule):
assert result["Id"] == 16011
def test_extract_log_operation_member(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
- f_module = self.get_module_mock(params={"log_type": "application", "share_address": "192.168.0.1",
+ f_module = self.get_module_mock(params={"log_type": "application", "share_address": "XX.XX.XX.XX",
"share_type": "NFS", "share_name": "iso", "share_user": "username",
"share_password": "password", "share_domain": "domain",
"mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"],
@@ -123,7 +123,7 @@ class TestOMEDiagnostics(FakeAnsibleModule):
def test_extract_log_operation_no_lead_chassis(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
f_module = self.get_module_mock(params={"lead_chassis_only": False, "log_type": "application",
- "share_address": "192.168.0.1",
+ "share_address": "XX.XX.XX.XX",
"share_type": "NFS", "share_name": "iso", "share_user": "username",
"share_password": "password", "share_domain": "domain",
"mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"], })
@@ -134,7 +134,7 @@ class TestOMEDiagnostics(FakeAnsibleModule):
def test_extract_log_operation_s1(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
f_module = self.get_module_mock(params={"lead_chassis_only": False, "log_type": "application",
- "share_address": "192.168.0.1",
+ "share_address": "XX.XX.XX.XX",
"share_type": "NFS",
"mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"], })
ome_response_mock.json_data = {"value": [{"Id": 16011, "Type": 2000}]}
@@ -143,7 +143,7 @@ class TestOMEDiagnostics(FakeAnsibleModule):
assert result["Id"] == 16011
def test_main_succes_case(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
- ome_default_args.update({"log_type": "support_assist_collection", "share_address": "192.168.0.1",
+ ome_default_args.update({"log_type": "support_assist_collection", "share_address": "XX.XX.XX.XX",
"share_type": "NFS", "share_name": "iso", "share_user": "username",
"share_password": "password", "share_domain": "domain",
"mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"],
@@ -170,7 +170,7 @@ class TestOMEDiagnostics(FakeAnsibleModule):
"share domain, and share credentials provided are correct."
def test_main_succes_case02(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
- ome_default_args.update({"log_type": "supportassist_collection", "share_address": "192.168.0.1",
+ ome_default_args.update({"log_type": "supportassist_collection", "share_address": "XX.XX.XX.XX",
"share_type": "CIFS", "share_name": "iso", "share_user": "username",
"share_password": "password", "share_domain": "domain",
"mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"],
@@ -197,7 +197,7 @@ class TestOMEDiagnostics(FakeAnsibleModule):
"share domain, and share credentials provided are correct."
def test_main_succes_case03(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
- ome_default_args.update({"log_type": "application", "share_address": "192.168.0.1",
+ ome_default_args.update({"log_type": "application", "share_address": "XX.XX.XX.XX",
"share_type": "NFS", "share_name": "iso", "mask_sensitive_info": "true",
"test_connection": True, "job_wait": True, "device_ids": [25011]})
mocker.patch(MODULE_PATH + "check_domain_service", return_value=None)
@@ -222,7 +222,7 @@ class TestOMEDiagnostics(FakeAnsibleModule):
"share domain, and share credentials provided are correct."
def test_main_succes_case04(self, ome_conn_mock_diagnostics, ome_response_mock, ome_default_args, mocker):
- ome_default_args.update({"log_type": "supportassist_collection", "share_address": "192.168.0.1",
+ ome_default_args.update({"log_type": "supportassist_collection", "share_address": "XX.XX.XX.XX",
"share_type": "CIFS", "share_name": "iso", "share_user": "username",
"share_password": "password", "share_domain": "domain",
"mask_sensitive_info": "true", "log_selectors": ["OS_LOGS"],
@@ -252,7 +252,7 @@ class TestOMEDiagnostics(FakeAnsibleModule):
[IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
def test_ome_diagnostics_main_exception_case(self, exc_type, mocker, ome_default_args,
ome_conn_mock_diagnostics, ome_response_mock):
- ome_default_args.update({"log_type": "application", "share_address": "192.168.0.1",
+ ome_default_args.update({"log_type": "application", "share_address": "XX.XX.XX.XX",
"share_type": "NFS", "mask_sensitive_info": False})
ome_response_mock.status_code = 400
ome_response_mock.success = False
@@ -267,7 +267,7 @@ class TestOMEDiagnostics(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_domain_service',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_discovery.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_discovery.py
index e84e7c7e2..0b5ee8290 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_discovery.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_discovery.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 3.3.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -20,7 +20,7 @@ from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_discovery
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_discovery.'
NO_CHANGES_MSG = "No changes found to be applied."
@@ -152,8 +152,7 @@ class TestOmeDiscovery(FakeAnsibleModule):
ome_response_mock.success = params.get("success", True)
ome_response_mock.json_data = params["json_data"]
ome_connection_mock_for_discovery.get_all_items_with_pagination.return_value = params['pag_ret_val']
- f_module = self.get_module_mock()
- ips = self.module.get_execution_details(f_module, ome_connection_mock_for_discovery, 1)
+ ips, job_status = self.module.get_execution_details(ome_connection_mock_for_discovery, 1)
assert ips == params['ips']

@pytest.mark.parametrize("params", [{"json_data": {'JobStatusId': 2060}, 'job_wait_sec': 60, 'job_failed': False,
@@ -166,9 +165,8 @@ class TestOmeDiscovery(FakeAnsibleModule):
ome_response_mock.success = params.get("success", True)
ome_response_mock.json_data = params["json_data"]
mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
- job_failed, msg = self.module.discovery_job_tracking(ome_connection_mock_for_discovery, 1,
- params['job_wait_sec'])
- assert job_failed == params['job_failed']
+ msg = self.module.discovery_job_tracking(ome_connection_mock_for_discovery, 1,
+ params['job_wait_sec'])
assert msg == params['msg']

@pytest.mark.parametrize("params", [{"discovery_json": {'DiscoveryConfigTaskParam': [{'TaskId': 12}]},
@@ -223,8 +221,7 @@ class TestOmeDiscovery(FakeAnsibleModule):
mocker.patch(MODULE_PATH + 'get_connection_profile', return_value=params['get_conn_json'])
disc_cfg_list = self.module.get_discovery_config(f_module, ome_connection_mock_for_discovery)
assert disc_cfg_list[0]['DeviceType'] == params['DeviceType']
- assert disc_cfg_list[0]['DiscoveryConfigTargets'] == params[
- 'DiscoveryConfigTargets'] # assert disc_cfg_list == params['disc_cfg_list']
+ assert disc_cfg_list[0]['DiscoveryConfigTargets'] == params['DiscoveryConfigTargets']

@pytest.mark.parametrize("params", [{"json_data": {"@odata.type": "#DiscoveryConfigService.DiscoveryJob",
"@odata.id": "/api/DiscoveryConfigService/Jobs(12617)",
@@ -243,20 +240,22 @@ class TestOmeDiscovery(FakeAnsibleModule):
assert djob == params['djob']

@pytest.mark.parametrize("params", [
- {"json_data": {"DiscoveryConfigGroupName": 'd1'}, 'job_failed': False, 'job_message': DISCOVER_JOB_COMPLETE,
+ {"json_data": {"DiscoveryConfigGroupName": 'd1'}, 'job_message': DISCOVER_JOB_COMPLETE,
'mparams': {'job_wait': True, 'schedule': 'RunNow', 'job_wait_timeout': 1000}},
- {"json_data": {"DiscoveryConfigGroupName": 'd1'}, 'job_failed': True, 'job_message': JOB_TRACK_FAIL,
+ {"json_data": {"DiscoveryConfigGroupName": 'd1'}, 'job_message': JOB_TRACK_FAIL,
'mparams': {'job_wait': True, 'schedule': 'RunNow', 'job_wait_timeout': 1000}},
- {"json_data": {"DiscoveryConfigGroupName": 'd1'}, 'job_failed': True, 'job_message': DISCOVERY_SCHEDULED,
+ {"json_data": {"DiscoveryConfigGroupName": 'd1'}, 'job_message': DISCOVERY_SCHEDULED,
'mparams': {'job_wait': False, 'schedule': 'RunLater', 'job_wait_timeout': 1000}}])
def test_create_discovery(self, params, mocker, ome_connection_mock_for_discovery, ome_response_mock):
mocker.patch(MODULE_PATH + 'get_discovery_config', return_value={})
mocker.patch(MODULE_PATH + 'get_schedule', return_value={})
mocker.patch(MODULE_PATH + 'get_other_discovery_payload', return_value={})
mocker.patch(MODULE_PATH + 'get_job_data', return_value=12)
- mocker.patch(MODULE_PATH + 'get_execution_details', return_value={})
- mocker.patch(MODULE_PATH + 'get_discovery_job', return_value={})
- mocker.patch(MODULE_PATH + 'discovery_job_tracking', return_value=(params['job_failed'], params['job_message']))
+ mocker.patch(MODULE_PATH + 'get_execution_details', return_value=({"Completed": ["XX.XX.XX.XX"], "Failed": []},
+ {"JobStatusId": 2050}))
+ mocker.patch(MODULE_PATH + 'get_discovery_job', return_value={"JobStatusId": 2050})
+ mocker.patch(MODULE_PATH + 'discovery_job_tracking', return_value=(params['job_message']))
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
ome_response_mock.success = params.get("success", True)
ome_response_mock.json_data = params["json_data"]
f_module = self.get_module_mock(params=params['mparams'])
@@ -283,7 +282,7 @@ class TestOmeDiscovery(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_existing_discovery',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
@@ -305,11 +304,13 @@ class TestOmeDiscovery(FakeAnsibleModule):
mocker.patch(MODULE_PATH + 'get_other_discovery_payload', return_value={"DiscoveryConfigGroupId": 10})
mocker.patch(MODULE_PATH + 'update_modify_payload', return_value=None)
mocker.patch(MODULE_PATH + 'get_job_data', return_value=12)
- mocker.patch(MODULE_PATH + 'get_execution_details', return_value={})
- mocker.patch(MODULE_PATH + 'get_discovery_job', return_value={})
+ mocker.patch(MODULE_PATH + 'get_execution_details', return_value=({"Completed": ["XX.XX.XX.XX"], "Failed": []},
+ {"JobStatusId": 2050}))
+ mocker.patch(MODULE_PATH + 'get_discovery_job', return_value={"JobStatusId": 2050})
mocker.patch(MODULE_PATH + 'get_discovery_config', return_value={})
mocker.patch(MODULE_PATH + 'get_discovery_states', return_value={12: 15})
- mocker.patch(MODULE_PATH + 'discovery_job_tracking', return_value=(params['job_failed'], params['job_message']))
+ mocker.patch(MODULE_PATH + 'discovery_job_tracking', return_value=(params['job_message']))
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
error_message = params["job_message"]
with pytest.raises(Exception) as err:
self.module.modify_discovery(f_module, ome_connection_mock_for_discovery, discov_list)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_domain_user_groups.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_domain_user_groups.py
index c931ed82c..d69093033 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_domain_user_groups.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_domain_user_groups.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 4.0.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.2.0
+# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -20,7 +20,7 @@ from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_domain_user_groups
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_domain_user_groups.'
NO_CHANGES_MSG = "No changes found to be applied."
@@ -74,7 +74,7 @@ class TestOMEADUser(FakeAnsibleModule):
def test_delete_directory_user(self, ome_conn_mock_ad, ome_response_mock, ome_default_args, mocker):
ome_response_mock.status_code = 204
msg, changed = self.module.delete_directory_user(ome_conn_mock_ad, 15011)
- assert msg == "Successfully deleted the active directory user group."
+ assert msg == "Successfully deleted the domain user group."
assert changed is True

def test_get_role(self, ome_conn_mock_ad, ome_response_mock, ome_default_args, mocker):
@@ -100,13 +100,15 @@ class TestOMEADUser(FakeAnsibleModule):
def test_search_directory(self, ome_conn_mock_ad, ome_response_mock, ome_default_args, mocker):
f_module = self.get_module_mock(params={"state": "present", "group_name": "Administrator",
- "domain_username": "admin@dev0", "domain_password": "password"})
+ "domain_username": "admin@dev0", "domain_password": "password",
+ "directory_type": "LDAP"})
ome_response_mock.json_data = [{"CommonName": "Administrator", "ObjectGuid": "object_id"}]
obj_id, name = self.module.search_directory(f_module, ome_conn_mock_ad, 16011)
assert obj_id == "object_id"
f_module = self.get_module_mock(params={"state": "present", "group_name": "Admin",
- "domain_username": "admin@dev0", "domain_password": "password"})
+ "domain_username": "admin@dev0", "domain_password": "password",
+ "directory_type": "AD"})
with pytest.raises(Exception) as err:
self.module.search_directory(f_module, ome_conn_mock_ad, 16011)
assert err.value.args[0] == "Unable to complete the operation because the entered " \
@@ -173,26 +175,50 @@ class TestOMEADUser(FakeAnsibleModule):
resp, msg = self.module.directory_user(f_module, ome_conn_mock_ad)
assert msg == "imported"
+ @pytest.mark.parametrize("params", [{
+ "module_args": {"state": "present", "group_name": "group1",
+ "domain_username": "admin@dev0", "domain_password": "password",
+ "directory_type": "LDAP"},
+ "directory_user": ([{"UserName": "Group1", "Id": 15011, "RoleId": "10", "Enabled": True}], 'imported'),
+ "msg": "Successfully imported the domain user group."},
+ {
+ "module_args": {"state": "absent", "group_name": "group1",
+ "domain_username": "admin@dev0", "domain_password": "password",
+ "directory_type": "LDAP"},
+ "get_directory_user": ({"UserName": "Group1", "Id": 15011, "RoleId": "10", "Enabled": True}),
+ "delete_directory_user": ("Successfully deleted the domain user group.", True),
+ "msg": "Successfully deleted the domain user group."}])
+ def test_main_success(self, params, ome_conn_mock_ad, ome_response_mock, ome_default_args, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = {"Name": "LDAP2"}
+ ome_conn_mock_ad.strip_substr_dict.return_value = params.get("directory_user", (None, 1))[0]
+ mocker.patch(MODULE_PATH + 'directory_user', return_value=params.get("directory_user", (None, 1)))
+ mocker.patch(MODULE_PATH + 'get_directory_user', return_value=params.get("get_directory_user", (None, 1)))
+ mocker.patch(MODULE_PATH + 'delete_directory_user', return_value=params.get("delete_directory_user", (None, 1)))
+ ome_default_args.update(params['module_args'])
+ result = self._run_module(ome_default_args)
+ assert result['msg'] == params['msg']
+
@pytest.mark.parametrize("exc_type",
[IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
- def test_ome_domain_exception(self, exc_type, mocker, ome_default_args,
- ome_conn_mock_ad, ome_response_mock):
- ome_default_args.update({"state": "absent"})
+ def test_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_conn_mock_ad, ome_response_mock):
+ ome_default_args.update({"state": "absent", "group_name": "group1"})
ome_response_mock.status_code = 400
ome_response_mock.success = False
json_str = to_text(json.dumps({"info": "error_details"}))
if exc_type == URLError:
mocker.patch(MODULE_PATH + 'get_directory_user', side_effect=exc_type("url open error"))
- result = self._run_module_with_fail_json(ome_default_args)
- assert result["failed"] is True
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
elif exc_type not in [HTTPError, SSLValidationError]:
mocker.patch(MODULE_PATH + 'get_directory_user', side_effect=exc_type("exception message"))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
else:
- mocker.patch(MODULE_PATH + 'get_directory_user',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
- {"accept-type": "application/json"}, StringIO(json_str)))
+ mocker.patch(MODULE_PATH + 'get_directory_user', side_effect=exc_type('https://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware.py
index 082b82934..f13a61b8c 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -228,10 +228,10 @@ class TestOmeFirmware(FakeAnsibleModule):
"PrerequisiteInfo": ""
}
],
- "DeviceIPAddress": "192.168.0.3",
+ "DeviceIPAddress": "XX.XX.XX.XX",
"DeviceId": "28628",
"DeviceModel": "PowerEdge R940",
- "DeviceName": "192.168.0.3",
+ "DeviceName": "XX.XX.XX.XX",
"DeviceServiceTag": "HC2XFL2",
"DeviceTypeId": "1000",
"DeviceTypeName": "SERVER"
@@ -315,12 +315,12 @@ class TestOmeFirmware(FakeAnsibleModule):
else:
builtin_module_name = '__builtin__'
f_module = self.get_module_mock(
- params={'dup_file': "/root1/Ansible_EXE/BIOS_87V69_WN64_2.4.7.EXE", 'hostname': '192.168.0.1'})
+ params={'dup_file': "/root1/Ansible_EXE/BIOS_87V69_WN64_2.4.7.EXE", 'hostname': 'XX.XX.XX.XX'})
with patch("{0}.open".format(builtin_module_name), mock_open(read_data="data")) as mock_file:
with pytest.raises(Exception) as exc:
self.module.upload_dup_file(ome_connection_firmware_mock, f_module)
assert exc.value.args[0] == "Unable to upload {0} to {1}".format('/root1/Ansible_EXE/BIOS_87V69_WN64_2.4.7.EXE',
- '192.168.0.1')
+ 'XX.XX.XX.XX')

def test_get_device_ids_success_case(self, ome_connection_firmware_mock, ome_response_mock, ome_default_args):
ome_default_args.update()
@@ -435,7 +435,8 @@ class TestOmeFirmware(FakeAnsibleModule):
def test_job_payload_for_update_case_02(self, ome_connection_firmware_mock, ome_response_mock):
"""baseline case"""
- f_module = self.get_module_mock(params={'schedule': 'RebootNow'})
+ f_module = self.get_module_mock(params={'schedule': 'RebootNow',
+ 'reboot_type': 'GracefulReboot'})
target_data = {}
baseline = {"baseline_id": 1, "repo_id": 2, "catalog_id": 3}
ome_connection_firmware_mock.get_job_type_id.return_value = ome_response_mock
@@ -450,7 +451,8 @@ class TestOmeFirmware(FakeAnsibleModule):
def test_job_payload_for_update_case_03(self, ome_connection_firmware_mock, ome_response_mock):
"""response None case"""
- f_module = self.get_module_mock(params={'schedule': 'RebootNow'})
+ f_module = self.get_module_mock(params={'schedule': 'RebootNow',
+ 'reboot_type': 'PowerCycle'})
target_data = {}
ome_connection_firmware_mock.get_job_type_id.return_value = ome_response_mock
payload = self.module.job_payload_for_update(ome_connection_firmware_mock, f_module, target_data)
@@ -547,7 +549,7 @@ class TestOmeFirmware(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'ome_firmware._validate_device_attributes',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline.py
index 8af8d6760..76d2ee0db 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.3.0
-# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.1.0
+# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -45,6 +45,7 @@ payload_out1 = {
"CatalogId": 12,
"RepositoryId": 23,
"DowngradeEnabled": True,
+ 'FilterNoRebootRequired': True,
"Is64Bit": True,
"Targets": [
{"Id": 123,
@@ -56,6 +57,7 @@ payload_out1 = {
payload_out2 = {
"Name": "baseline1",
"CatalogId": 12,
+ 'FilterNoRebootRequired': False,
"RepositoryId": 23, 'Description': None, 'DowngradeEnabled': True, 'Is64Bit': True,
"Targets": [
{"Id": 123,
@@ -361,12 +363,14 @@ class TestOmeFirmwareBaseline(FakeAnsibleModule):
"baseline_name": "baseline1",
"baseline_description": "baseline_description",
"downgrade_enabled": True,
- "is_64_bit": True}
+ "is_64_bit": True,
+ "filter_no_reboot_required": True}
payload_param2 = {"catalog_name": "cat1",
"baseline_name": "baseline1",
"baseline_description": None,
"downgrade_enabled": None,
- "is_64_bit": None}
+ "is_64_bit": None,
+ "filter_no_reboot_required": False}
@pytest.mark.parametrize("params", [{"inp": payload_param1, "out": payload_out1},
{"inp": payload_param2, "out": payload_out2}])
@@ -547,7 +551,7 @@ class TestOmeFirmwareBaseline(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_existing_baseline',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py
index 96672f6d6..76592ef05 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.1.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -22,6 +22,8 @@ from ansible_collections.dellemc.openmanage.plugins.modules import ome_firmware_
from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, \
AnsibleFailJSonException, Constants
+HTTP_ADDRESS = 'https://testhost.com'
+
@pytest.fixture
def ome_connection_mock_for_firmware_baseline_compliance_info(mocker, ome_response_mock):
@@ -60,7 +62,7 @@ class TestOmeFirmwareCatalog(FakeAnsibleModule):
ome_connection_mock_for_firmware_baseline_compliance_info,
ome_response_mock):
ome_connection_mock_for_firmware_baseline_compliance_info.get_all_report_details.side_effect = HTTPError(
- 'http://testhost.com', 400, '', {}, None)
+ HTTP_ADDRESS, 400, '', {}, None)
f_module = self.get_module_mock()
with pytest.raises(HTTPError) as ex:
self.module._get_device_id_from_service_tags(["INVALID"],
@@ -100,7 +102,7 @@ class TestOmeFirmwareCatalog(FakeAnsibleModule):
def test_get_device_ids_from_group_ids_error_case(self, ome_connection_mock_for_firmware_baseline_compliance_info,
ome_response_mock):
ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.side_effect = HTTPError(
- 'http://testhost.com', 400, '', {}, None)
+ HTTP_ADDRESS, 400, '', {}, None)
f_module = self.get_module_mock()
with pytest.raises(HTTPError) as ex:
device_ids = self.module.get_device_ids_from_group_ids(f_module, ["123456"],
@@ -145,7 +147,7 @@ class TestOmeFirmwareCatalog(FakeAnsibleModule):
def test_get_device_ids_from_group_names_error_case(self, ome_connection_mock_for_firmware_baseline_compliance_info,
ome_response_mock):
ome_connection_mock_for_firmware_baseline_compliance_info.get_all_report_details.side_effect = HTTPError(
- 'http://testhost.com', 400, '', {}, None)
+ HTTP_ADDRESS, 400, '', {}, None)
f_module = self.get_module_mock(params={"device_group_names": ["abc", "xyz"]})
with pytest.raises(HTTPError) as ex:
self.module.get_device_ids_from_group_names(f_module,
@@ -253,7 +255,7 @@ class TestOmeFirmwareCatalog(FakeAnsibleModule):
ome_connection_mock_for_firmware_baseline_compliance_info,
ome_response_mock):
ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.side_effect = HTTPError(
- 'http://testhost.com', 400, '', {}, None)
+ HTTP_ADDRESS, 400, '', {}, None)
f_module = self.get_module_mock(params={"baseline_name": "baseline_name1"})
with pytest.raises(HTTPError) as ex:
self.module.get_baseline_id_from_name(ome_connection_mock_for_firmware_baseline_compliance_info, f_module)
@@ -268,7 +270,7 @@ class TestOmeFirmwareCatalog(FakeAnsibleModule):
'test')
else:
ome_connection_mock_for_firmware_baseline_compliance_info.get_all_items_with_pagination.side_effect = exc_type(
- 'http://testhost.com', 400, '', {}, None)
+ HTTP_ADDRESS, 400, '', {}, None)
ome_response_mock.status_code = 400
ome_response_mock.success = False
f_module = self.get_module_mock(params={"baseline_name": "baseline_name1"})
@@ -348,7 +350,7 @@ class TestOmeFirmwareCatalog(FakeAnsibleModule):
ome_connection_mock_for_firmware_baseline_compliance_info.invoke_request.side_effect = exc_type('test')
else:
ome_connection_mock_for_firmware_baseline_compliance_info.invoke_request.side_effect = exc_type(
- 'http://testhost.com', 400, '', err_dict, None)
+ HTTP_ADDRESS, 400, '', err_dict, None)
f_module = self.get_module_mock()
with pytest.raises(exc_type):
self.module.get_baselines_report_by_device_ids(
@@ -379,7 +381,7 @@ class TestOmeFirmwareCatalog(FakeAnsibleModule):
else:
mocker.patch(
'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baseline_id_from_name',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type(HTTP_ADDRESS, 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
f_module = self.get_module_mock(params={"baseline_name": "baseline1"})
with pytest.raises(exc_type):
@@ -527,7 +529,7 @@ class TestOmeFirmwareCatalog(FakeAnsibleModule):
else:
mocker.patch(
'ansible_collections.dellemc.openmanage.plugins.modules.ome_firmware_baseline_compliance_info.get_baselines_report_by_device_ids',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type(HTTP_ADDRESS, 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert 'baseline_compliance_info' not in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_info.py
index 6d394a1ae..7095b3b95 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_info.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_baseline_info.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.1.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -18,7 +18,7 @@ from ssl import SSLError
from ansible_collections.dellemc.openmanage.plugins.modules import ome_firmware_baseline_info
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from io import StringIO
from ansible.module_utils._text import to_text
@@ -111,7 +111,7 @@ class TestOmeFirmwareBaselineInfo(FakeAnsibleModule):
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
else:
- ome_connection_ome_firmware_baseline_info_mock.invoke_request.side_effect = exc_type('http://testhost.com',
+ ome_connection_ome_firmware_baseline_info_mock.invoke_request.side_effect = exc_type('https://testhost.com',
400,
'http error message',
{
@@ -122,7 +122,7 @@ class TestOmeFirmwareBaselineInfo(FakeAnsibleModule):
assert "error_info" in result
assert result['msg'] == 'HTTP Error 400: http error message'
- ome_connection_ome_firmware_baseline_info_mock.invoke_request.side_effect = exc_type('http://testhost.com',
+ ome_connection_ome_firmware_baseline_info_mock.invoke_request.side_effect = exc_type('https://testhost.com',
404,
'<404 not found>',
{
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_catalog.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_catalog.py
index c0f0a5147..07f7260ab 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_catalog.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_firmware_catalog.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.3.0
-# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -259,11 +259,11 @@ class TestOmeFirmwareCatalog(FakeAnsibleModule):
assert result["unreachable"] is True
elif exc_type not in [HTTPError, SSLValidationError]:
mocker.patch(MODULE_PATH + 'check_existing_catalog', side_effect=exc_type("exception message"))
- result = self._run_module_with_fail_json(ome_default_args)
+ result = self._run_module(ome_default_args)
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_existing_catalog',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
@@ -820,11 +820,11 @@ class TestOmeFirmwareCatalog(FakeAnsibleModule):
assert result["unreachable"] is True
elif exc_type not in [HTTPError, SSLValidationError]:
mocker.patch(MODULE_PATH + 'validate_names', side_effect=exc_type("exception message"))
- result = self._run_module_with_fail_json(ome_default_args)
+ result = self._run_module(ome_default_args)
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'validate_names',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
@@ -862,3 +862,19 @@ class TestOmeFirmwareCatalog(FakeAnsibleModule):
ome_default_args.update({"repository_type": "HTTPS", "catalog_name": "t1", "catalog_id": 1})
result = self._run_module_with_fail_json(ome_default_args)
assert result["msg"] == "parameters are mutually exclusive: catalog_name|catalog_id"
+
+ @pytest.mark.parametrize("param", [{"hostname": "invalid-host-abcd"}])
+ def test_ome_catalog_invalid_hostname_case1(self, ome_default_args, param):
+ # To verify invalid IP or hostname in module_utils/ome
+ ome_default_args.update({"hostname": param['hostname'], "catalog_name": "catalog1", "repository_type": "HTTPS", "ca_path": ""})
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ assert "error" in result['msg']
+
+ @pytest.mark.parametrize("param", [{"hostname": "ABCD:ABCD:ABCD:EF12:3456:7890"}])
+ def _test_ome_catalog_invalid_hostname_case2(self, ome_default_args, param):
+ # To verify invalid IP or hostname in module_utils/ome
+ ome_default_args.update({"hostname": param['hostname'], "catalog_name": "catalog1", "repository_type": "HTTPS", "ca_path": ""})
+ result = self._run_module(ome_default_args)
+ assert "does not appear to be an IPv4 or IPv6 address" in result['msg']
+ assert param['hostname'] in result['msg']
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_groups.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_groups.py
index 6aede9323..224f8388a 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_groups.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_groups.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 3.5.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -20,7 +20,7 @@ from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_groups
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MULTIPLE_GROUPS_MSG = "Provide only one unique device group when state is present."
NONEXIST_GROUP_ID = "A device group with the provided ID does not exist."
@@ -117,6 +117,7 @@ class TestOmeGroups(FakeAnsibleModule):
ome_connection_mock_for_groups.strip_substr_dict.return_value = params.get('created_group', {})
mocker.patch(MODULE_PATH + 'get_ome_group_by_id', return_value=params.get('created_group', {}))
mocker.patch(MODULE_PATH + 'create_parent', return_value=params['created_group'].get('ParentId'))
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
ome_default_args.update(params['mparams'])
result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
assert result['msg'] == (params['message']).format(op='create')
@@ -151,6 +152,7 @@ class TestOmeGroups(FakeAnsibleModule):
ome_connection_mock_for_groups.strip_substr_dict.return_value = params.get('created_group', {})
mocker.patch(MODULE_PATH + 'get_ome_group_by_id', return_value=params.get('created_group', {}))
mocker.patch(MODULE_PATH + 'create_parent', return_value=params['created_group'].get('ParentId'))
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
# mocker.patch(MODULE_PATH + 'is_parent_in_subtree', return_value=False)
ome_default_args.update(params['mparams'])
result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
@@ -267,7 +269,7 @@ class TestOmeGroups(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'get_valid_groups',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_identity_pool.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_identity_pool.py
index 93c18d22e..d7a6d8b84 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_identity_pool.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_identity_pool.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.1.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -14,7 +14,7 @@ __metaclass__ = type
import pytest
from ansible_collections.dellemc.openmanage.plugins.modules import ome_identity_pool
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ssl import SSLError
@@ -58,7 +58,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
"ip_range": "10.33.0.1-10.33.0.255",
"primary_dns_server": "10.8.8.8",
"secondary_dns_server": "8.8.8.8",
- "subnet_mask": "255.255.255.0"
+ "subnet_mask": "XXX.XXX.XXX.XXX"
},
"starting_mac_address": "60:60:60:60:60:00"
},
@@ -100,7 +100,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'ome_identity_pool.pool_create_modify',
- side_effect=exc_type('http://testhost.com', 400,
+ side_effect=exc_type('https://testhost.com', 400,
'http error message',
{"accept-type": "application/json"},
StringIO(json_str)))
@@ -267,7 +267,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
"identity_count": 75,
"starting_mac_address": "aabb.ccdd.7070"
},
- "hostname": "192.168.0.1",
+ "hostname": "XX.XX.XX.XX",
"iscsi_settings": {
"identity_count": 30,
"initiator_config": {
@@ -278,7 +278,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
"ip_range": "10.33.0.1-10.33.0.255",
"primary_dns_server": "10.8.8.8",
"secondary_dns_server": "8.8.8.8",
- "subnet_mask": "255.255.255.0"
+ "subnet_mask": "XXX.XXX.XXX.XXX"
},
"starting_mac_address": "60:60:60:60:60:00"
},
@@ -311,7 +311,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
},
"InitiatorIpPoolSettings": {
"IpRange": "10.33.0.1-10.33.0.255",
- "SubnetMask": "255.255.255.0",
+ "SubnetMask": "XXX.XXX.XXX.XXX",
"Gateway": "192.168.4.1",
"PrimaryDnsServer": "10.8.8.8",
"SecondaryDnsServer": "8.8.8.8"
@@ -339,7 +339,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
assert payload["IscsiSettings"]["Mac"] == {"IdentityCount": 30, "StartingMacAddress": "YGBgYGAA"}
assert payload["IscsiSettings"]["InitiatorIpPoolSettings"] == {
"IpRange": "10.33.0.1-10.33.0.255",
- "SubnetMask": "255.255.255.0",
+ "SubnetMask": "XXX.XXX.XXX.XXX",
"Gateway": "192.168.4.1",
"PrimaryDnsServer": "10.8.8.8",
"SecondaryDnsServer": "8.8.8.8"
@@ -364,7 +364,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
"ip_range": "20.33.0.1-20.33.0.255",
"primary_dns_server": "10.8.8.8",
"secondary_dns_server": "8.8.8.8",
- "subnet_mask": "255.255.255.0"
+ "subnet_mask": "XXX.XXX.XXX.XXX"
},
"starting_mac_address": "10:10:10:10:10:00"
}
@@ -379,7 +379,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
assert payload["IscsiSettings"]["Mac"] == {"IdentityCount": 30, "StartingMacAddress": "EBAQEBAA"}
assert payload["IscsiSettings"]["InitiatorIpPoolSettings"] == {
"IpRange": "20.33.0.1-20.33.0.255",
- "SubnetMask": "255.255.255.0",
+ "SubnetMask": "XXX.XXX.XXX.XXX",
"Gateway": "192.168.4.1",
"PrimaryDnsServer": "10.8.8.8",
"SecondaryDnsServer": "8.8.8.8"
@@ -1040,7 +1040,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
},
"InitiatorIpPoolSettings": {
"IpRange": "10.33.0.1-10.33.0.255",
- "SubnetMask": "255.255.255.0",
+ "SubnetMask": "XXX.XXX.XXX.XXX",
"Gateway": "192.168.4.1",
"PrimaryDnsServer": "10.8.8.8",
"SecondaryDnsServer": "8.8.8.8"
@@ -1185,7 +1185,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
self.module.validate_modify_create_payload(modify_payload, f_module, action)
payload_iscsi3 = {
- "SubnetMask": "255.255.255.0",
+ "SubnetMask": "XXX.XXX.XXX.XXX",
"Gateway": "192.168.4.1",
"PrimaryDnsServer": "10.8.8.8",
"SecondaryDnsServer": "8.8.8.8"
@@ -1300,7 +1300,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
"ip_range": "10.33.0.1-10.33.0.255",
"primary_dns_server": "10.8.8.8",
"secondary_dns_server": "8.8.8.8",
- "subnet_mask": "255.255.255.0"
+ "subnet_mask": "XXX.XXX.XXX.XXX"
},
"starting_mac_address": "60:60:60:60:60:00"
}
@@ -1317,7 +1317,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
},
"InitiatorIpPoolSettings": {
"IpRange": "10.33.0.1-10.33.0.255",
- "SubnetMask": "255.255.255.0",
+ "SubnetMask": "XXX.XXX.XXX.XXX",
"Gateway": "192.168.4.1",
"PrimaryDnsServer": "10.8.8.8",
"SecondaryDnsServer": "8.8.8.8"
@@ -1331,7 +1331,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
"initiator_ip_pool_settings": {
"gateway": "192.168.4.1",
"ip_range": "10.33.0.1-10.33.0.255",
- "subnet_mask": "255.255.255.0"
+ "subnet_mask": "XXX.XXX.XXX.XXX"
}
}
self.module.update_iscsi_specific_settings(payload, settings_params, setting_type)
@@ -1340,7 +1340,7 @@ class TestOMeIdentityPool(FakeAnsibleModule):
"IscsiSettings": {
"InitiatorIpPoolSettings": {
"IpRange": "10.33.0.1-10.33.0.255",
- "SubnetMask": "255.255.255.0",
+ "SubnetMask": "XXX.XXX.XXX.XXX",
"Gateway": "192.168.4.1"
}
}}
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_job_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_job_info.py
index 34de35d11..d73e119b6 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_job_info.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_job_info.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 2.1.3
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2019-2020 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -15,7 +15,7 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import ome_job_info
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from io import StringIO
@@ -66,6 +66,19 @@ class TestOmeJobInfo(FakeAnsibleModule):
assert result['changed'] is False
assert 'job_info' in result
+ def test_get_execution_history_and_last_execution_detail_of_a_job(self, ome_default_args,
+ ome_connection_job_info_mock,
+ ome_response_mock):
+ ome_default_args.update({"job_id": 1, "fetch_execution_history": True})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [{"job_id": 1}]}
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert result['changed'] is False
+ assert 'job_info' in result
+ assert 'LastExecutionDetail' in result['job_info']
+ assert 'ExecutionHistories' in result['job_info']
+
def test_job_info_success_case03(self, ome_default_args, ome_connection_job_info_mock,
ome_response_mock):
ome_default_args.update({"system_query_options": {"filter": "abc"}})
@@ -96,9 +109,9 @@ class TestOmeJobInfo(FakeAnsibleModule):
else:
mocker.patch(
MODULE_PATH + 'ome_job_info._get_query_parameters',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
- if not exc_type == URLError:
+ if exc_type != URLError:
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
else:
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_port_breakout.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_port_breakout.py
index 44ceef4d2..196c0fd32 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_port_breakout.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_port_breakout.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 3.0.0
-# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -17,8 +17,7 @@ import pytest
from ansible_collections.dellemc.openmanage.plugins.modules import ome_network_port_breakout
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \
- AnsibleFailJSonException
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from io import StringIO
from ansible.module_utils._text import to_text
@@ -210,7 +209,7 @@ class TestOMEPortBreakout(FakeAnsibleModule):
if exc_type not in [HTTPError, SSLValidationError]:
ome_connection_breakout_mock.invoke_request.side_effect = exc_type('test')
else:
- ome_connection_breakout_mock.invoke_request.side_effect = exc_type('http://testhost.com', 400,
+ ome_connection_breakout_mock.invoke_request.side_effect = exc_type('https://testhost.com', 400,
'http error message',
{"accept-type": "application/json"},
StringIO(json_str))
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan.py
index e7b7a05c6..0420e3a25 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.3.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -19,7 +19,7 @@ from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_network_vlan
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_network_vlan.'
@@ -202,7 +202,7 @@ class TestOmeNetworkVlan(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_existing_vlan',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan_info.py
index 084fcd85c..6cbabe928 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan_info.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_network_vlan_info.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 2.1.3
-# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -15,13 +15,15 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import ome_network_vlan_info
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from io import StringIO
from ansible.module_utils._text import to_text
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+ACCESS_TYPE = "application/json"
+HTTP_ADDRESS = 'https://testhost.com'
response = {
'@odata.context': '/api/$metadata#Collection(NetworkConfigurationService.Network)',
@@ -168,7 +170,7 @@ class TestOmeNetworkVlanInfo(FakeAnsibleModule):
assert result["unreachable"] is True
elif exc_type == HTTPError:
ome_connection_network_vlan_info_mock.invoke_request.side_effect = exc_type(
- 'http://testhost.com', 400, '<400 bad request>', {"accept-type": "application/json"},
+ HTTP_ADDRESS, 400, '<400 bad request>', {"accept-type": ACCESS_TYPE},
StringIO(json_str))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
@@ -176,7 +178,7 @@ class TestOmeNetworkVlanInfo(FakeAnsibleModule):
assert 'error_info' in result
ome_connection_network_vlan_info_mock.invoke_request.side_effect = exc_type(
- 'http://testhost.com', 404, '<404 not found>', {"accept-type": "application/json"}, StringIO(json_str))
+ HTTP_ADDRESS, 404, '<404 not found>', {"accept-type": ACCESS_TYPE}, StringIO(json_str))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
assert 'msg' in result
@@ -188,8 +190,8 @@ class TestOmeNetworkVlanInfo(FakeAnsibleModule):
assert 'msg' in result
else:
mocker.patch(MODULE_PATH + 'ome_network_vlan_info.get_network_type_and_qos_type_information',
- side_effect=exc_type('http://testhost.com', 404, 'http error message',
- {"accept-type": "application/json"}, StringIO(json_str)))
+ side_effect=exc_type(HTTP_ADDRESS, 404, 'http error message',
+ {"accept-type": ACCESS_TYPE}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_powerstate.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_powerstate.py
index 707e495c2..0f23a3e11 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_powerstate.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_powerstate.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 3.3.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2020-2021 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -422,11 +422,11 @@ class TestOmePowerstate(FakeAnsibleModule):
else:
mocker.patch(
MODULE_PATH + 'ome_powerstate.spawn_update_job',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
mocker.patch(
MODULE_PATH + 'ome_powerstate.get_device_resource',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert 'power_state' not in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_profile.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_profile.py
index 91f7fc1b5..a1afe7635 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_profile.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_profile.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -278,7 +278,7 @@ class TestOmeProfile(FakeAnsibleModule):
"res": "Profile with the name 'profile' not found."},
{"mparams": {"command": "modify", "name": "profile", "new_name": "modified profile",
"description": "new description",
- "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "XX.XX.XX.XX",
"iso_path": "path/to/my_iso.iso",
"iso_timeout": 8},
"attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1",
@@ -298,7 +298,7 @@ class TestOmeProfile(FakeAnsibleModule):
"json_data": 0, "res": "No changes found to be applied."},
{"mparams": {"command": "modify", "name": "profile", "new_name": "modified profile",
"description": "new description",
- "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "XX.XX.XX.XX",
"iso_path": "path/to/my_iso.iso", "iso_timeout": 8},
"attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1",
"IsIgnored": True}]}}, "success": True,
@@ -363,7 +363,7 @@ class TestOmeProfile(FakeAnsibleModule):
"json_data": [234, 123],
"res": "The target device is invalid for the given profile."},
{"mparams": {"command": "assign", "name": "profile", "device_id": 234,
- "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "XX.XX.XX.XX",
"iso_path": "path/to/my_iso.iso",
"iso_timeout": 8},
"attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
@@ -371,14 +371,14 @@ class TestOmeProfile(FakeAnsibleModule):
"prof": {"Id": 123, "ProfileState": 0}, "target": {"Id": 234, "Name": "mytarget"}, "json_data": [23, 123],
"res": "Successfully applied the assign operation."},
{"mparams": {"command": "assign", "name": "profile", "device_service_tag": "ABCDEFG",
- "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "XX.XX.XX.XX",
"iso_path": "path/to/my_iso.iso",
"iso_timeout": 8},
"attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
"success": True, "prof": {"Id": 123, "ProfileState": 0}, "target": {"Id": 234, "Name": "mytarget"},
"json_data": [23, 123], "res": "Successfully applied the assign operation."},
{"mparams": {"command": "assign", "name": "profile", "device_id": 234,
- "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "XX.XX.XX.XX",
"iso_path": "path/to/my_iso.iso",
"iso_timeout": 8},
"attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
@@ -387,7 +387,7 @@ class TestOmeProfile(FakeAnsibleModule):
"json_data": [23, 123],
"res": "The profile is assigned to the target 234."},
{"mparams": {"command": "assign", "name": "profile", "device_id": 234,
- "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "XX.XX.XX.XX",
"iso_path": "path/to/my_iso.iso",
"iso_timeout": 8},
"attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
@@ -397,7 +397,7 @@ class TestOmeProfile(FakeAnsibleModule):
"res": "The profile is assigned to a different target. Use the migrate command or unassign the profile and "
"then proceed with assigning the profile to the target."},
{"mparams": {"command": "assign", "name": "profile", "device_service_tag": "STG1234",
- "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "XX.XX.XX.XX",
"iso_path": "path/to/my_iso.iso",
"iso_timeout": 8},
"attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
@@ -406,7 +406,7 @@ class TestOmeProfile(FakeAnsibleModule):
"json_data": [23, 123],
"res": "The profile is assigned to the target STG1234."},
{"mparams": {"command": "assign", "name": "profile", "device_id": 123,
- "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "XX.XX.XX.XX",
"iso_path": "path/to/my_iso.iso",
"iso_timeout": 8},
"attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
@@ -415,7 +415,7 @@ class TestOmeProfile(FakeAnsibleModule):
"json_data": [23, 123],
"res": "Target invalid."},
{"mparams": {"command": "assign", "name": "profile", "device_id": 234,
- "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "XX.XX.XX.XX",
"iso_path": "path/to/my_iso.iso",
"iso_timeout": 8},
"attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
@@ -423,7 +423,7 @@ class TestOmeProfile(FakeAnsibleModule):
"prof": {"Id": 123, "ProfileState": 0}, "target": {"Id": 234, "Name": "mytarget"}, "json_data": [23, 123],
"res": CHANGES_MSG},
{"mparams": {"command": "assign", "name": "profile", "device_id": 234,
- "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "192.168.0.1",
+ "boot_to_network_iso": {"boot_to_network": True, "share_type": "NFS", "share_ip": "XX.XX.XX.XX",
"iso_path": "path/to/my_iso.iso",
"iso_timeout": 8},
"attributes": {"Attributes": [{"Id": 4506, "Value": "server attr 1", "IsIgnored": True}]}},
@@ -540,7 +540,7 @@ class TestOmeProfile(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'profile_operation',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_profile_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_profile_info.py
new file mode 100644
index 000000000..22175439b
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_profile_info.py
@@ -0,0 +1,1279 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 7.2.0
+# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+from ssl import SSLError
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_profile_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
+SUCCESS_MSG = "Successfully retrieved the profile information."
+NO_PROFILES_MSG = "No profiles were found."
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_profile_info.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_profile_info(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeProfileInfo(FakeAnsibleModule):
+ module = ome_profile_info
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [{'Id': 1234, 'Name': "ABCTAG1", "Type": 1000}],
+ "AttributeGroups": [
+ {
+ "GroupNameId": 9,
+ "DisplayName": "iDRAC",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 32688,
+ "DisplayName": "Active Directory",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 7587,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2342,
+ "DisplayName": "ActiveDirectory 1 Active Directory RAC Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32851,
+ "DisplayName": "IPv4 Information",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 8133,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2199,
+ "DisplayName": "IPv4 1 IPv4 DHCP Enable",
+ "Description": None,
+ "Value": "Enabled",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 7974,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2198,
+ "DisplayName": "IPv4 1 IPv4 Enable",
+ "Description": None,
+ "Value": "Enabled",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32852,
+ "DisplayName": "IPv4 Static Information",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 7916,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2400,
+ "DisplayName": "IPv4Static 1 Gateway",
+ "Description": None,
+ "Value": "XX.XX.XX.XX",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 8245,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2399,
+ "DisplayName": "IPv4Static 1 IPv4 Address",
+ "Description": None,
+ "Value": "XX.XX.XX.XX20",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ },
+ {
+ "AttributeId": 7724,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2403,
+ "DisplayName": "IPv4Static 1 Net Mask",
+ "Description": None,
+ "Value": "XXX.XXX.XXX.XXX",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32855,
+ "DisplayName": "IPv6 Information",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 8186,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2207,
+ "DisplayName": "IPv6 1 IPV6 Auto Config",
+ "Description": None,
+ "Value": "Enabled",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 7973,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2205,
+ "DisplayName": "IPv6 1 IPV6 Enable",
+ "Description": None,
+ "Value": "Disabled",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32856,
+ "DisplayName": "IPv6 Static Information",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 8244,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2405,
+ "DisplayName": "IPv6Static 1 IPv6 Address 1",
+ "Description": None,
+ "Value": "::",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ },
+ {
+ "AttributeId": 7917,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2404,
+ "DisplayName": "IPv6Static 1 IPv6 Gateway",
+ "Description": None,
+ "Value": "::",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 7687,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2408,
+ "DisplayName": "IPv6Static 1 IPV6 Link Local Prefix Length",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32930,
+ "DisplayName": "NIC Information",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 8111,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2193,
+ "DisplayName": "NIC 1 DNS RAC Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ },
+ {
+ "AttributeId": 7189,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2194,
+ "DisplayName": "NIC 1 Enable VLAN",
+ "Description": None,
+ "Value": "Disabled",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 7166,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2197,
+ "DisplayName": "NIC 1 VLAN ID",
+ "Description": None,
+ "Value": "1",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32934,
+ "DisplayName": "NIC Static Information",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 8116,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2396,
+ "DisplayName": "NICStatic 1 DNS Domain Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ }
+ ]
+ }
+ ],
+ "Attributes": []
+ },
+ {
+ "GroupNameId": 4,
+ "DisplayName": "NIC",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 66,
+ "DisplayName": "NIC.Integrated.1-1-1",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 32761,
+ "DisplayName": "FCoE Target 01",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6723,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4769,
+ "DisplayName": "Boot LUN",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6735,
+ "CustomId": 0,
+ "AttributeEditInfoId": 5083,
+ "DisplayName": "Boot Order",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6722,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4734,
+ "DisplayName": "Virtual LAN ID",
+ "Description": None,
+ "Value": "1",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6721,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4641,
+ "DisplayName": "World Wide Port Name Target",
+ "Description": None,
+ "Value": "00:00:00:00:00:00:00:00",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32762,
+ "DisplayName": "FCoE Target 02",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6733,
+ "CustomId": 0,
+ "AttributeEditInfoId": 5113,
+ "DisplayName": "Boot Order",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32763,
+ "DisplayName": "FCoE Target 03",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6732,
+ "CustomId": 0,
+ "AttributeEditInfoId": 5122,
+ "DisplayName": "Boot Order",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32764,
+ "DisplayName": "FCoE Target 04",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6734,
+ "CustomId": 0,
+ "AttributeEditInfoId": 5082,
+ "DisplayName": "Boot Order",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32870,
+ "DisplayName": "iSCSI General Parameters",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6730,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4768,
+ "DisplayName": "CHAP Authentication",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ },
+ {
+ "AttributeId": 6729,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4767,
+ "DisplayName": "CHAP Mutual Authentication",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32871,
+ "DisplayName": "iSCSI Initiator Parameters",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6713,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4601,
+ "DisplayName": "CHAP ID",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6712,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4681,
+ "DisplayName": "CHAP Secret",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32867,
+ "DisplayName": "iSCSI Target 01",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6720,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4802,
+ "DisplayName": "Boot LUN",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6719,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4920,
+ "DisplayName": "CHAP Secret",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6718,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4609,
+ "DisplayName": "IP Address",
+ "Description": None,
+ "Value": "0.0.0.0",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6717,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4537,
+ "DisplayName": "iSCSI Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6716,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4698,
+ "DisplayName": "TCP Port",
+ "Description": None,
+ "Value": "3260",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ }
+ ],
+ "Attributes": []
+ },
+ {
+ "GroupNameId": 67,
+ "DisplayName": "NIC.Integrated.1-2-1",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 32761,
+ "DisplayName": "FCoE Target 01",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6788,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4769,
+ "DisplayName": "Boot LUN",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6801,
+ "CustomId": 0,
+ "AttributeEditInfoId": 5083,
+ "DisplayName": "Boot Order",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6787,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4734,
+ "DisplayName": "Virtual LAN ID",
+ "Description": None,
+ "Value": "1",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6786,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4641,
+ "DisplayName": "World Wide Port Name Target",
+ "Description": None,
+ "Value": "00:00:00:00:00:00:00:00",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32762,
+ "DisplayName": "FCoE Target 02",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6799,
+ "CustomId": 0,
+ "AttributeEditInfoId": 5113,
+ "DisplayName": "Boot Order",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32763,
+ "DisplayName": "FCoE Target 03",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6798,
+ "CustomId": 0,
+ "AttributeEditInfoId": 5122,
+ "DisplayName": "Boot Order",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32764,
+ "DisplayName": "FCoE Target 04",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6800,
+ "CustomId": 0,
+ "AttributeEditInfoId": 5082,
+ "DisplayName": "Boot Order",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32870,
+ "DisplayName": "iSCSI General Parameters",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6796,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4768,
+ "DisplayName": "CHAP Authentication",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ },
+ {
+ "AttributeId": 6795,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4767,
+ "DisplayName": "CHAP Mutual Authentication",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32871,
+ "DisplayName": "iSCSI Initiator Parameters",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6778,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4601,
+ "DisplayName": "CHAP ID",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6777,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4681,
+ "DisplayName": "CHAP Secret",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32867,
+ "DisplayName": "iSCSI Target 01",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6785,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4802,
+ "DisplayName": "Boot LUN",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6784,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4920,
+ "DisplayName": "CHAP Secret",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6783,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4609,
+ "DisplayName": "IP Address",
+ "Description": None,
+ "Value": "0.0.0.0",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6782,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4537,
+ "DisplayName": "iSCSI Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6781,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4698,
+ "DisplayName": "TCP Port",
+ "Description": None,
+ "Value": "3260",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ }
+ ],
+ "Attributes": []
+ },
+ {
+ "GroupNameId": 65,
+ "DisplayName": "NIC.Integrated.1-3-1",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 32870,
+ "DisplayName": "iSCSI General Parameters",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6677,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4768,
+ "DisplayName": "CHAP Authentication",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ },
+ {
+ "AttributeId": 6676,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4767,
+ "DisplayName": "CHAP Mutual Authentication",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32871,
+ "DisplayName": "iSCSI Initiator Parameters",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6664,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4601,
+ "DisplayName": "CHAP ID",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6663,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4681,
+ "DisplayName": "CHAP Secret",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32867,
+ "DisplayName": "iSCSI Target 01",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6671,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4802,
+ "DisplayName": "Boot LUN",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6670,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4920,
+ "DisplayName": "CHAP Secret",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6669,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4609,
+ "DisplayName": "IP Address",
+ "Description": None,
+ "Value": "0.0.0.0",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6668,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4537,
+ "DisplayName": "iSCSI Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6667,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4698,
+ "DisplayName": "TCP Port",
+ "Description": None,
+ "Value": "3260",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ }
+ ],
+ "Attributes": []
+ },
+ {
+ "GroupNameId": 68,
+ "DisplayName": "NIC.Integrated.1-4-1",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 32870,
+ "DisplayName": "iSCSI General Parameters",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6852,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4768,
+ "DisplayName": "CHAP Authentication",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ },
+ {
+ "AttributeId": 6851,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4767,
+ "DisplayName": "CHAP Mutual Authentication",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32871,
+ "DisplayName": "iSCSI Initiator Parameters",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6838,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4601,
+ "DisplayName": "CHAP ID",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6837,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4681,
+ "DisplayName": "CHAP Secret",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ },
+ {
+ "GroupNameId": 32867,
+ "DisplayName": "iSCSI Target 01",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 6846,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4802,
+ "DisplayName": "Boot LUN",
+ "Description": None,
+ "Value": "0",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6845,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4920,
+ "DisplayName": "CHAP Secret",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6844,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4609,
+ "DisplayName": "IP Address",
+ "Description": None,
+ "Value": "0.0.0.0",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6843,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4537,
+ "DisplayName": "iSCSI Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 6842,
+ "CustomId": 0,
+ "AttributeEditInfoId": 4698,
+ "DisplayName": "TCP Port",
+ "Description": None,
+ "Value": "3260",
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ }
+ ],
+ "Attributes": []
+ }
+ ],
+ "Attributes": []
+ },
+ {
+ "GroupNameId": 5,
+ "DisplayName": "System",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 33016,
+ "DisplayName": "Server Operating System",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 8513,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2497,
+ "DisplayName": "ServerOS 1 Server Host Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ }
+ ]
+ },
+ {
+ "GroupNameId": 33019,
+ "DisplayName": "Server Topology",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 8593,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2248,
+ "DisplayName": "ServerTopology 1 Aisle Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 8551,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2247,
+ "DisplayName": "ServerTopology 1 Data Center Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ },
+ {
+ "AttributeId": 8371,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2249,
+ "DisplayName": "ServerTopology 1 Rack Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ },
+ {
+ "AttributeId": 8370,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2250,
+ "DisplayName": "ServerTopology 1 Rack Slot",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 3
+ },
+ {
+ "AttributeId": 8346,
+ "CustomId": 0,
+ "AttributeEditInfoId": 2500,
+ "DisplayName": "ServerTopology 1 Room Name",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": True,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 2
+ }
+ ]
+ }
+ ],
+ "Attributes": []
+ }]
+ },
+ 'message': SUCCESS_MSG, "success": True, 'case': "template with id",
+ 'mparams': {"template_id": 1234}},
+ {"json_data": {"value": [{'Id': 1234, 'Name': "temp1", "Type": 1000}]},
+ 'message': SUCCESS_MSG, "success": True, 'case': "template with name",
+ 'mparams': {"template_name": "temp1"}},
+ {"json_data": {"value": [{'Id': 1234, 'Name': "temp2", "Type": 1000}]},
+ 'message': "Template with name 'temp1' not found.", "success": True, 'case': "template with name",
+ 'mparams': {"template_name": "temp1"}},
+ {"json_data": {'Id': 1234, 'Name': "temp1", "Type": 1000},
+ 'message': SUCCESS_MSG, "success": True, 'case': "profile with id",
+ 'mparams': {"profile_id": 1234}},
+ {"json_data": {"value": [{'Id': 1235, 'ProfileName': "prof0", "Type": 1000},
+ {'Id': 1234, 'ProfileName': "prof1", "Type": 1000}]},
+ 'message': SUCCESS_MSG, "success": True, 'case': "profile with name",
+ 'mparams': {"profile_name": "prof1"}},
+ {"json_data": {"value": [{'Id': 1235, 'ProfileName': "prof0", "Type": 1000},
+ {'Id': 1234, 'ProfileName': "prof1", "Type": 1000}]},
+ 'message': "Profiles with profile_name prof2 not found.", "success": True, 'case': "profile with name",
+ 'mparams': {"profile_name": "prof2"}},
+ {"json_data": {"value": [{'Id': 1234, 'Name': "prof1", "Type": 1000}]},
+ 'message': SUCCESS_MSG, "success": True, 'case': "template with name",
+ 'mparams': {"system_query_options": {"filter": "ProfileName eq 'prof2'"}}},
+ {"json_data": {"value": [{'Id': 1234, 'Name': "prof1", "Type": 1000}]},
+ 'message': SUCCESS_MSG, "success": True, 'case': "template with name",
+ 'mparams': {}},
+ ])
+ def test_ome_profile_info_success(self, params, ome_connection_mock_for_profile_info, ome_response_mock,
+ ome_default_args, module_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_connection_mock_for_profile_info.get_all_items_with_pagination.return_value = params['json_data']
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_profile_info_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_profile_info, ome_response_mock):
+ ome_default_args.update({"template_id": 1234})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'get_template_details', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'get_template_details', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'get_template_details',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profile_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profile_info.py
index d83725d25..34ebb99a8 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profile_info.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profile_info.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.1.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2022-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -20,7 +20,7 @@ from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils._text import to_text
from ansible_collections.dellemc.openmanage.plugins.modules import ome_server_interface_profile_info
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_server_interface_profile_info.'
@@ -52,8 +52,10 @@ class TestOMEMSIP(FakeAnsibleModule):
assert err.value.args[0] == "Unable to complete the operation because the entered target " \
"device id(s) '25011' are invalid."
f_module = self.get_module_mock(params={"device_id": [25012]})
- ome_response_mock.json_data = {"Id": "HKRF20", "ServerServiceTag": "HKRF20", "value": [{"Network": []}]}
- ome_conn_mock_sip.json_data = [{"Id": "HKRF20", "ServerServiceTag": "HKRF20"}]
+ ome_response_mock.json_data = {
+ "Id": "HKRF20", "ServerServiceTag": "HKRF20", "value": [{"Network": []}]}
+ ome_conn_mock_sip.json_data = [
+ {"Id": "HKRF20", "ServerServiceTag": "HKRF20"}]
ome_conn_mock_sip.strip_substr_dict.return_value = {"Id": "HKRF20", "ServerServiceTag": "HKRF20",
"Networks": [{"Id": 10001}]}
result = self.module.get_sip_info(f_module, ome_conn_mock_sip)
@@ -64,31 +66,127 @@ class TestOMEMSIP(FakeAnsibleModule):
with pytest.raises(Exception) as err:
self._run_module(ome_default_args)
assert err.value.args[0]['msg'] == "one of the following is required: device_id, device_service_tag."
- ome_default_args.update({"device_id": [25011], "validate_certs": False})
+ ome_default_args.update(
+ {"device_id": [25011], "validate_certs": False})
mocker.patch(MODULE_PATH + 'check_domain_service')
- mocker.patch(MODULE_PATH + 'get_sip_info', return_value={"server_profiles": [{"Id": 25011}]})
+ mocker.patch(MODULE_PATH + 'get_sip_info',
+ return_value={"server_profiles": [{"Id": 25011}]})
result = self._run_module(ome_default_args)
assert result["msg"] == "Successfully retrieved the server interface profile information."
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"report_list": [{"Id": 25012, "DeviceServiceTag": "HKRF20"}],
+ "value": [
+ {'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceId': 1234, "Type": 1000}]},
+ 'message': "Unable to complete the operation because the server profile(s) for HKRF20 do not exist in the Fabric Manager.",
+ "check_domain_service": True,
+ 'http_error_json': {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "MessageId": "CDEV5008",
+ "RelatedProperties": [],
+ "Message": "Unable to process the request because an error occurred.",
+ "MessageArgs": [],
+ "Severity": "Critical",
+ "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
+ }
+ ]
+ }
+ },
+ 'mparams': {"device_service_tag": ['HKRF20']}
+ },
+ {"json_data": {"report_list": [{"Id": 25012, "DeviceServiceTag": "HKRF20"}],
+ "value": [
+ {'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceId': 1234, "Type": 1000}]},
+ 'message': "Unable to complete the operation because the server profile(s) for 25012 do not exist in the Fabric Manager.",
+ "check_domain_service": True,
+ 'http_error_json': {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "MessageId": "CDEV5008",
+ "RelatedProperties": [],
+ "Message": "Unable to process the request because an error occurred.",
+ "MessageArgs": [],
+ "Severity": "Critical",
+ "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
+ }
+ ]}
+ },
+ 'mparams': {"device_id": [25012]}
+ },
+ {"json_data": {"report_list": [{"Id": 25012, "DeviceServiceTag": "HKRF20"}],
+ "value": [
+ {'Id': 1234, 'PublicAddress': "XX.XX.XX.XX", 'DeviceId': 1234, "Type": 1000}]},
+ 'message': "The information retrieval operation of server interface profile is supported only on OpenManage Enterprise Modular.",
+ 'http_error_json': {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "MessageId": "CGEN1006",
+ "RelatedProperties": [],
+ "Message": "Unable to process the request because an error occurred.",
+ "MessageArgs": [],
+ "Severity": "Critical",
+ "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
+ }
+ ]
+ }
+ },
+ 'mparams': {"device_id": [25012]}
+ }
+ ])
+ def test_ome_sip_info_failure(self, params, ome_conn_mock_sip, ome_response_mock,
+ ome_default_args, module_mock, mocker):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_conn_mock_sip.get_all_report_details.return_value = params[
+ 'json_data']
+ mocks = ["check_domain_service"]
+ for m in mocks:
+ if m in params:
+ mocker.patch(MODULE_PATH + m, return_value=params.get(m, {}))
+ if 'http_error_json' in params:
+ json_str = to_text(json.dumps(params.get('http_error_json', {})))
+ ome_conn_mock_sip.invoke_request.side_effect = HTTPError(
+ 'https://testhost.com', 401, 'http error message', {
+ "accept-type": "application/json"},
+ StringIO(json_str))
+ ome_default_args.update(params['mparams'])
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == params['message']
+
@pytest.mark.parametrize("exc_type",
[IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
def test_ome_sip_power_main_exception_case(self, exc_type, mocker, ome_default_args,
ome_conn_mock_sip, ome_response_mock):
- ome_default_args.update({"device_id": [25011], "validate_certs": False})
+ ome_default_args.update(
+ {"device_id": [25011], "validate_certs": False})
ome_response_mock.status_code = 400
ome_response_mock.success = False
json_str = to_text(json.dumps({"info": "error_details"}))
if exc_type == URLError:
- mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("url open error"))
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type("url open error"))
result = self._run_module(ome_default_args)
assert result["unreachable"] is True
elif exc_type not in [HTTPError, SSLValidationError]:
- mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("exception message"))
+ mocker.patch(MODULE_PATH + 'check_domain_service',
+ side_effect=exc_type("exception message"))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_domain_service',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profiles.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profiles.py
index dcb1688a0..1231f4404 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profiles.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_server_interface_profiles.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.1.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2022-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -74,14 +74,14 @@ class TestOmeSIPs(FakeAnsibleModule):
],
"NicBonded": False
}},
- "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "vlan_map": {"testvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
"three": 14681},
"natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
'mparams': {"job_wait": False, "device_service_tag": ["ABC1234"],
"nic_configuration": [{
"nic_identifier": "NIC.Mezzanine.1A-1-1",
"tagged_networks": {
- "names": ["jagvlan"],
+ "names": ["testvlan"],
"state": "present"},
"team": False,
"untagged_network": 3},
@@ -132,14 +132,14 @@ class TestOmeSIPs(FakeAnsibleModule):
],
"NicBonded": False
}},
- "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "vlan_map": {"testvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
"three": 14681},
"natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
'mparams': {"job_wait": False, "device_service_tag": ["ABC1234"],
"nic_configuration": [{
"nic_identifier": "NIC.Mezzanine.1A-1-1",
"tagged_networks": {
- "names": ["jagvlan"],
+ "names": ["testvlan"],
"state": "present"},
"team": False,
"untagged_network": 10},
@@ -218,14 +218,14 @@ class TestOmeSIPs(FakeAnsibleModule):
],
"NicBonded": False
}},
- "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "vlan_map": {"testvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
"three": 14681},
"natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
'mparams': {"job_wait": False, "device_service_tag": ["ABC1234"],
"nic_configuration": [{
"nic_identifier": "NIC.Mezzanine.1A-1-1",
"tagged_networks": {
- "names": ["jagvlan", "VLAN 1"],
+ "names": ["testvlan", "VLAN 1"],
"state": "present"},
"team": False,
"untagged_network": 3},
@@ -259,14 +259,14 @@ class TestOmeSIPs(FakeAnsibleModule):
],
"NicBonded": False
}},
- "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "vlan_map": {"testvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
"three": 14681},
"natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
'mparams': {"job_wait": False, "device_service_tag": ["ABC1234"],
"nic_configuration": [{
"nic_identifier": "NIC.Mezzanine.1A-1-1",
"tagged_networks": {
- "names": ["jagvlan"],
+ "names": ["testvlan"],
"state": "present"},
"team": False,
"untagged_network": 3},
@@ -303,14 +303,14 @@ class TestOmeSIPs(FakeAnsibleModule):
],
"NicBonded": False
}},
- "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "vlan_map": {"testvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
"three": 14681},
"natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
'mparams': {"job_wait": False, "device_service_tag": ["ABC1234"],
"nic_configuration": [{
"nic_identifier": "NIC.Mezzanine.1A-1-1",
"tagged_networks": {
- "names": ["jagvlan"],
+ "names": ["testvlan"],
"state": "present"},
"team": False,
"untagged_network": 3},
@@ -358,14 +358,14 @@ class TestOmeSIPs(FakeAnsibleModule):
],
"NicBonded": False
}},
- "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "vlan_map": {"testvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
"three": 14681},
"natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
'mparams': {"device_service_tag": ["ABC1234"],
"nic_configuration": [{
"nic_identifier": "NIC.Mezzanine.1A-1-1",
"tagged_networks": {
- "names": ["jagvlan"],
+ "names": ["testvlan"],
"state": "present"},
"team": False,
"untagged_network": 3},
@@ -413,14 +413,14 @@ class TestOmeSIPs(FakeAnsibleModule):
],
"NicBonded": False
}},
- "vlan_map": {"jagvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
+ "vlan_map": {"testvlan": 10155, "VLAN 1": 11569, "range120-125": 12350, "range130-135": 12352, "two": 14679,
"three": 14681},
"natives": {143: 10155, 1: 11569, 2: 14679, 3: 14681, 0: 0},
'mparams': {"device_service_tag": ["ABC1234"],
"nic_configuration": [{
"nic_identifier": "NIC.Mezzanine.1A-1-1",
"tagged_networks": {
- "names": ["jagvlan"],
+ "names": ["testvlan"],
"state": "present"},
"team": False,
"untagged_network": 3},
@@ -503,7 +503,7 @@ class TestOmeSIPs(FakeAnsibleModule):
"Networks": [
{
"Id": 10155,
- "Name": "jagvlan",
+ "Name": "testvlan",
"Description": None,
"VlanMaximum": 143,
"VlanMinimum": 143,
@@ -529,7 +529,7 @@ class TestOmeSIPs(FakeAnsibleModule):
"Networks": [
{
"Id": 10155,
- "Name": "jagvlan",
+ "Name": "testvlan",
"Description": None,
"VlanMaximum": 143,
"VlanMinimum": 143,
@@ -594,7 +594,7 @@ class TestOmeSIPs(FakeAnsibleModule):
[{"json_data": {"@odata.context": "/api/$metadata#Collection(NetworkConfigurationService.Network)",
"@odata.count": 6,
"value": [{"Id": 10155,
- "Name": "jagvlan",
+ "Name": "testvlan",
"VlanMaximum": 143,
"VlanMinimum": 143,
"Type": 1,
@@ -630,7 +630,7 @@ class TestOmeSIPs(FakeAnsibleModule):
"VlanMinimum": 3,
"Type": 3,
}]},
- "vlan_map": {"jagvlan": 10155,
+ "vlan_map": {"testvlan": 10155,
"VLAN 1": 11569,
"range120-125": 12350,
"range130-135": 12352,
@@ -689,7 +689,7 @@ class TestOmeSIPs(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'get_valid_service_tags',
- side_effect=exc_type('http://testhost.com',
+ side_effect=exc_type('https://testhost.com',
400,
'http error message',
{"accept-type": "application/json"},
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric.py
index 5d275f197..4f27b8081 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 3.6.0
-# Copyright (C) 2020-2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -164,7 +164,7 @@ class TestOmeSmartFabric(FakeAnsibleModule):
else:
for status_code, msg in {501: SYSTEM_NOT_SUPPORTED_ERROR_MSG, 400: 'http error message'}.items():
mocker.patch(MODULE_PATH + 'ome_smart_fabric.fabric_actions',
- side_effect=exc_type('http://testhost.com', status_code, msg,
+ side_effect=exc_type('https://testhost.com', status_code, msg,
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_info.py
new file mode 100644
index 000000000..afe071acd
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_info.py
@@ -0,0 +1,324 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 7.1.0
+# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_smart_fabric_info
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from io import StringIO
+from ssl import SSLError
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+
+
+@pytest.fixture
+def ome_connection_smart_fabric_info_mock(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(
+ MODULE_PATH + 'ome_smart_fabric_info.RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOMESmartFabricInfo(FakeAnsibleModule):
+ module = ome_smart_fabric_info
+
+ smart_fabric_details_dict = [{"Description": "Fabric f1",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "NODEID1"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "NODEID2"
+ }],
+ "Id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2",
+ "LifeCycleStatus": [
+ {
+ "Activity": "Create",
+ "Status": "2060"
+ }
+ ],
+ "Uplinks": [
+ {
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "MediaType": "Ethernet",
+ "Name": "u1",
+ "NativeVLAN": 1}],
+ "Switches": [
+ {
+ "ChassisServiceTag": "6H5S6Z2",
+ "ConnectionState": True}],
+ "Servers": [
+ {
+ "ChassisServiceTag": "6H5S6Z2",
+ "ConnectionState": True,
+ "ConnectionStateReason": 101}],
+
+ "Multicast": [
+ {
+ "FloodRestrict": True,
+ "IgmpVersion": "3",
+ "MldVersion": "2"
+ }
+ ],
+ "FabricDesign": [
+ {
+ "FabricDesignNode": [
+ {
+ "ChassisName": "Chassis-X",
+ "NodeName": "Switch-B",
+ "Slot": "Slot-A2",
+ "Type": "WeaverSwitch"
+ },
+ {
+ "ChassisName": "Chassis-X",
+ "NodeName": "Switch-A",
+ "Slot": "Slot-A1",
+ "Type": "WeaverSwitch"
+ }
+ ],
+ "Name": "2xMX9116n_Fabric_Switching_Engines_in_same_chassis",
+ }
+ ],
+ "Name": "f2",
+ "OverrideLLDPConfiguration": "Disabled",
+ "ScaleVLANProfile": "Enabled",
+ "Summary": {
+ "NodeCount": 2,
+ "ServerCount": 1,
+ "UplinkCount": 1
+ }}]
+
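+ # clean_data is expected to drop @odata navigation references and any entries left empty after stripping.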
+ @pytest.mark.parametrize("params", [{"json_data": {"Multicast": {
+ "@odata.id": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/Multicast",
+ "Id": "123hg"}}, "json_data_two": {"Multicast": {
+ "@odata.id": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/Multicast"}},
+ "json_data_three": {"Id": 123},
+ "output_one": {'Multicast': {'Id': "123hg"}}, "output_two": {}, "output_three": {"Id": 123}}])
+ def test_clean_data(self, params):
+ result_one = self.module.clean_data(params.get("json_data"))
+ result_two = self.module.clean_data(params.get("json_data_two"))
+ result_three = self.module.clean_data(params.get("json_data_three"))
+ assert result_one == params.get("output_one")
+ assert result_two == params.get("output_two")
+ assert result_three == params.get("output_three")
+
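+ # A fabric payload carrying @odata navigation links should yield a populated result from fetch_smart_fabric_link_details.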
+ @pytest.mark.parametrize("params", [{"json_data": {
+ "Id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2",
+ "Name": "f1",
+ "Description": "Fabric f1",
+ "Switches@odata.navigationLink": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/Switches",
+ "Servers@odata.navigationLink": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/Servers",
+ "FabricDesign": {
+ "@odata.id": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/FabricDesign"
+ },
+ "ValidationErrors@odata.navigationLink": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/ValidationErrors",
+ "Uplinks@odata.navigationLink": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/Uplinks",
+ "Topology": {
+ "@odata.id": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/Topology"
+ },
+ "ISLLinks@odata.navigationLink": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/ISLLinks",
+ "Multicast": {
+ "@odata.id": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/Multicast"
+ }
+ }}])
+ def test_fetch_smart_fabric_link_details(self, params, ome_connection_mock):
+ f_module = self.get_module_mock()
+ result = self.module.fetch_smart_fabric_link_details(
+ f_module, ome_connection_mock, params.get('json_data'))
+ assert result is not None
+
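+ # An HTTPError raised while following the fabric links should surface as the module's failure message.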
+ @pytest.mark.parametrize("params", [{"json_data": {
+ "Id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2",
+ "Name": "f1",
+ "Description": "Fabric f1",
+ "Switches@odata.navigationLink": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/Switches",
+ "Servers@odata.navigationLink": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/Servers",
+ "FabricDesign": {
+ "@odata.id": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/FabricDesign"
+ },
+ "ValidationErrors@odata.navigationLink": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/ValidationErrors",
+ "Uplinks@odata.navigationLink": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/Uplinks",
+ "Topology": {
+ "@odata.id": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/Topology"
+ },
+ "ISLLinks@odata.navigationLink": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/ISLLinks",
+ "Multicast": {
+ "@odata.id": "/api/NetworkService/Fabrics('61c20a59-9ed5-4ae5-b850-5e5acf42d2f2')/Multicast"
+ }
+ }}])
+ def test_fetch_smart_fabric_link_details_HTTPError_error_case(self, params, ome_default_args, mocker, ome_connection_mock):
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ error_msg = "Unable to retrieve smart fabric information."
+ ome_connection_mock.invoke_request.side_effect = HTTPError('https://testdell.com', 404,
+ error_msg,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module.fetch_smart_fabric_link_details(
+ f_module, ome_connection_mock, params.get('json_data'))
+ assert exc.value.args[0] == error_msg
+
+ def test_ome_smart_fabric_info_main_success_case_all(self, ome_default_args, ome_connection_smart_fabric_info_mock,
+ ome_response_mock):
+ ome_response_mock.status_code = 200
+ result = self._run_module(ome_default_args)
+ assert 'smart_fabric_info' in result
+ assert result['msg'] == "Successfully retrieved the smart fabric information."
+
+ def test_ome_smart_fabric_main_success_case_fabric_id(self, mocker, ome_default_args, ome_connection_smart_fabric_info_mock,
+ ome_response_mock):
+ ome_default_args.update({"fabric_id": "1"})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [{"fabric_id": "1"}]}
+ ome_response_mock.status_code = 200
+ mocker.patch(
+ MODULE_PATH + 'ome_smart_fabric_info.strip_smart_fabric_info',
+ return_value=self.smart_fabric_details_dict)
+ result = self._run_module(ome_default_args)
+ assert 'smart_fabric_info' in result
+ assert result['msg'] == "Successfully retrieved the smart fabric information."
+
+ @pytest.mark.parametrize("params", [{"fabric_name": "f1",
+ "json_data": {"value": [{"Description": "Fabric f1",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "NODEID1"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "NODEID2"
+ }],
+ "Id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2",
+ "LifeCycleStatus": [
+ {
+ "Activity": "Create",
+ "Status": "2060"
+ }
+ ],
+ "Name": "f1",
+ "OverrideLLDPConfiguration": "Disabled",
+ "ScaleVLANProfile": "Enabled",
+ "Summary": {
+ "NodeCount": 2,
+ "ServerCount": 1,
+ "UplinkCount": 1
+ }}]
+ }}])
+ def test_ome_smart_fabric_main_success_case_fabric_name(self, mocker, params, ome_default_args, ome_connection_smart_fabric_info_mock,
+ ome_response_mock):
+ ome_default_args.update({"fabric_name": params["fabric_name"]})
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 200
+ ome_response_mock.json_data = params["json_data"]
+ mocker.patch(
+ MODULE_PATH + 'ome_smart_fabric_info.strip_smart_fabric_info',
+ return_value=self.smart_fabric_details_dict)
+ result = self._run_module(ome_default_args)
+ assert 'smart_fabric_info' in result
+ assert result['msg'] == "Successfully retrieved the smart fabric information."
+
+ @pytest.mark.parametrize("params", [{"fabric_name": "f1",
+ "json_data": {"value": [{"Description": "Fabric f1",
+ "FabricDesignMapping": [
+ {
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "NODEID1"
+ },
+ {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "NODEID2"
+ }],
+ "Id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2",
+ "LifeCycleStatus": [
+ {
+ "Activity": "Create",
+ "Status": "2060"
+ }
+ ],
+ "Name": "f2",
+ "OverrideLLDPConfiguration": "Disabled",
+ "ScaleVLANProfile": "Enabled",
+ "Summary": {
+ "NodeCount": 2,
+ "ServerCount": 1,
+ "UplinkCount": 1
+ }}]
+ }}])
+ def test_ome_smart_fabric_main_failure_case_fabric_name(self, params, ome_default_args, ome_connection_smart_fabric_info_mock,
+ ome_response_mock):
+ ome_default_args.update({"fabric_name": params["fabric_name"]})
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 200
+ ome_response_mock.json_data = params["json_data"]
+ result = self._run_module(ome_default_args)
+ assert result['msg'] == 'Unable to retrieve smart fabric information with fabric name {0}.'.format(
+ params["fabric_name"])
+
+ def test_ome_smart_fabric_main_failure_case(self, ome_default_args, ome_connection_smart_fabric_info_mock,
+ ome_response_mock):
+ ome_response_mock.success = True
+ ome_response_mock.status_code = 200
+ ome_response_mock.json_data = {}
+ result = self._run_module(ome_default_args)
+ assert 'smart_fabric_info' not in result
+ assert result['msg'] == "Unable to retrieve smart fabric information."
+
+ @pytest.mark.parametrize("params", [{"fabric_id": "f1"}])
+ def test_get_smart_fabric_details_via_id_HTTPError_error_case(self, params, ome_default_args, mocker, ome_connection_mock):
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ error_msg = "Unable to retrieve smart fabric information with fabric ID {0}.".format(
+ params.get('fabric_id'))
+ ome_connection_mock.invoke_request.side_effect = HTTPError('https://testdell.com', 404,
+ error_msg,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module.get_smart_fabric_details_via_id(
+ f_module, ome_connection_mock, params.get('fabric_id'))
+ assert exc.value.args[0] == error_msg
+
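+ # All transport and parsing exceptions should fail the module; URLError alone is reported through msg without fabric data.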
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_smart_fabric_info_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_smart_fabric_info_mock,
+ ome_response_mock):
+ ome_response_mock.status_code = 404
+ ome_response_mock.success = False
+ fabric_name_dict = {"fabric_name": "f1"}
+ ome_default_args.update(fabric_name_dict)
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ ome_connection_smart_fabric_info_mock.invoke_request.side_effect = exc_type(
+ 'test')
+ else:
+ ome_connection_smart_fabric_info_mock.invoke_request.side_effect = exc_type('https://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ if not exc_type == URLError:
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ result = self._run_module(ome_default_args)
+ assert 'smart_fabric_info' not in result
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_uplink.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_uplink.py
index 6670499e9..7d62223aa 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_uplink.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_uplink.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.3.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -378,7 +378,7 @@ class TestOmeSmartFabricUplink(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'get_item_id',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_uplink_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_uplink_info.py
new file mode 100644
index 000000000..18fbe8816
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_smart_fabric_uplink_info.py
@@ -0,0 +1,1155 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2022-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+from ssl import SSLError
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_smart_fabric_uplink_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_smart_fabric_uplink_info.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_smart_fabric_uplink_info(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeSmartFabricUplinkInfo(FakeAnsibleModule):
+ module = ome_smart_fabric_uplink_info
+
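+ # Representative uplink payload returned by the mocked strip_uplink_info helper in the success-path tests.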
+ uplink_info = [{
+ "Description": "",
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "MediaType": "Ethernet",
+ "Name": "u1",
+ "NativeVLAN": 1,
+ "Networks": [{
+ "CreatedBy": "system",
+ "CreationTime": "2018-09-25 14:46:12.374",
+ "Description": "null",
+ "Id": 10155,
+ "InternalRefNWUUId": "f15a36b6-e3d3-46b2-9e7d-bf9cd66e180d",
+ "Name": "testvlan",
+ "Type": 1,
+ "UpdatedBy": "root",
+ "UpdatedTime": "2019-06-27 15:06:22.836",
+ "VlanMaximum": 143,
+ "VlanMinimum": 143
+ }],
+ "Ports": [{
+ "AdminStatus": "Enabled",
+ "BlinkStatus": "OFF",
+ "ConfiguredSpeed": "0",
+ "CurrentSpeed": "0",
+ "Description": "",
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "MaxSpeed": "0",
+ "MediaType": "Ethernet",
+ "Name": "",
+ "NodeServiceTag": "SVCTAG1",
+ "OpticsType": "NotPresent",
+ "PortNumber": "ethernet1/1/35",
+ "Role": "Uplink",
+ "Status": "Down",
+ "Type": "PhysicalEthernet"
+ }, {
+ "AdminStatus": "Enabled",
+ "BlinkStatus": "OFF",
+ "ConfiguredSpeed": "0",
+ "CurrentSpeed": "0",
+ "Description": "",
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "MaxSpeed": "0",
+ "MediaType": "Ethernet",
+ "Name": "",
+ "NodeServiceTag": "SVCTAG1",
+ "OpticsType": "NotPresent",
+ "PortNumber": "ethernet1/1/35",
+ "Role": "Uplink",
+ "Status": "Down",
+ "Type": "PhysicalEthernet"
+ }],
+ "Summary": {
+ "NetworkCount": 1,
+ "PortCount": 2
+ },
+ "UfdEnable": "Disabled"
+ }]
+
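+ # get_uplink_details_from_fabric_id should list the uplinks that belong to the given fabric.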
+ @pytest.mark.parametrize("params", [{"success": True,
+ "json_data": {"value": [{
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "Name": "u1",
+ "Description": "",
+ "MediaType": "Ethernet",
+ "NativeVLAN": 1,
+ "Summary": {"PortCount": 2,
+ "NetworkCount": 1
+ },
+ "UfdEnable": "Disabled",
+ "Ports@odata.count": 2,
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }],
+ "Networks@odata.count": 1,
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null",
+ "VlanMaximum": 143,
+ "VlanMinimum": 143,
+ "Type": 1,
+ "CreatedBy": "system",
+ "CreationTime": "2018-09-25 14:46:12.374",
+ "UpdatedBy": "root",
+ "UpdatedTime": "2019-06-27 15:06:22.836",
+ "InternalRefNWUUId": "f15a36b6-e3d3-46b2-9e7d-bf9cd66e180d"
+ }]
+ }]
+ },
+ "fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2",
+ "uplink_id": "1ad54420-b145-49a1-9779-21a579ef6f2d"}])
+ def test_uplink_details_from_fabric_id(self, params, ome_connection_mock_for_smart_fabric_uplink_info, ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params.get("fabric_id"))
+ resp = self.module.get_uplink_details_from_fabric_id(f_module, ome_connection_mock_for_smart_fabric_uplink_info,
+ params.get("fabric_id"))
+ assert resp[0]["Id"] == params["uplink_id"]
+
+ @pytest.mark.parametrize("params", [{"success": True,
+ "json_data": {"value": [{
+ "Id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2",
+ "Name": "f1",
+ "Description": "Fabric f1",
+ "OverrideLLDPConfiguration": "Disabled",
+ "ScaleVLANProfile": "Enabled",
+ "Summary": {
+ "NodeCount": 2,
+ "ServerCount": 1,
+ "UplinkCount": 1
+ },
+ "LifeCycleStatus": [{
+ "Activity": "Create",
+ "Status": "2060"
+ }],
+ "FabricDesignMapping": [{
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "SVCTAG1"
+ }, {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "SVCTAG1"
+ }],
+ "Actions": "null",
+ }]},
+ "fabric_name": "f1",
+ "fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2"
+ }]
+ )
+ def test_get_fabric_name_details(self, params, ome_connection_mock_for_smart_fabric_uplink_info,
+ ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params.get("fabric_name"))
+ fabric_id = self.module.get_fabric_id_from_name(f_module, ome_connection_mock_for_smart_fabric_uplink_info,
+ params.get("fabric_name"))
+ assert fabric_id == params["fabric_id"]
+
+ @pytest.mark.parametrize("params", [{"inp": {"fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2", "uplink_id": "1ad54420-b145-49a1-9779-21a579ef6f2d"},
+ "success": True,
+ "json_data": {
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "Name": "u1",
+ "Description": "",
+ "MediaType": "Ethernet",
+ "NativeVLAN": 1,
+ "Summary": {
+ "PortCount": 2,
+ "NetworkCount": 1
+ },
+ "UfdEnable": "Disabled",
+ "Ports@odata.count": 2,
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }],
+ "Networks@odata.count": 1,
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null",
+ "VlanMaximum": 143,
+ "VlanMinimum": 143,
+ "Type": 1,
+ "CreatedBy": "system",
+ "CreationTime": "2018-09-25 14:46:12.374",
+ "UpdatedBy": "root",
+ "UpdatedTime": "2019-06-27 15:06:22.836",
+ "InternalRefNWUUId": "f15a36b6-e3d3-46b2-9e7d-bf9cd66e180d"
+ }]},
+ "uplink_id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2"
+ }]
+ )
+ def test_get_uplink_details(self, params, ome_connection_mock_for_smart_fabric_uplink_info,
+ ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params.get("inp", {}))
+ resp = self.module.get_uplink_details(f_module, ome_connection_mock_for_smart_fabric_uplink_info,
+ params.get("fabric_id"), params.get("uplink_id"))
+ assert resp[0]["Id"] == params["uplink_id"]
+
+ @pytest.mark.parametrize("params", [{"inp": {"fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2"},
+ "success": True,
+ "json_data": {
+ "value": [{
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "Name": "u1",
+ "Description": "",
+ "MediaType": "Ethernet",
+ "NativeVLAN": 1,
+ "Summary": {
+ "PortCount": 2,
+ "NetworkCount": 1
+ },
+ "UfdEnable": "Disabled",
+ "Ports@odata.count": 2,
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }],
+ "Networks@odata.count": 1,
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null",
+ "VlanMaximum": 143,
+ "VlanMinimum": 143,
+ "Type": 1,
+ "CreatedBy": "system",
+ "CreationTime": "2018-09-25 14:46:12.374",
+ "UpdatedBy": "root",
+ "UpdatedTime": "2019-06-27 15:06:22.836",
+ "InternalRefNWUUId": "f15a36b6-e3d3-46b2-9e7d-bf9cd66e180d"
+ }]
+ }]},
+ "uplink_name": "u1",
+ "fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2",
+ "uplink_id": "1ad54420-b145-49a1-9779-21a579ef6f2d"
+ }]
+ )
+ def test_get_uplink_name_details(self, params, ome_connection_mock_for_smart_fabric_uplink_info,
+ ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params.get("inp", {}))
+ uplink_id = self.module.get_uplink_id_from_name(f_module, ome_connection_mock_for_smart_fabric_uplink_info,
+ params.get("uplink_name"), params.get("fabric_id"))
+ assert uplink_id == params["uplink_id"]
+
+ @pytest.mark.parametrize("params", [{"success": True,
+ "mparams": {"fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f"},
+ "msg": "Successfully retrieved the fabric uplink information.",
+ "get_uplink_details_from_fabric_id": {"value": [{
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "Name": "u1",
+ "Description": "",
+ "MediaType": "Ethernet",
+ "NativeVLAN": 1,
+ "Summary": {
+ "PortCount": 2,
+ "NetworkCount": 1
+ },
+ "UfdEnable": "Disabled",
+ "Ports@odata.count": 2,
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }],
+ "Networks@odata.count": 1,
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null",
+ "VlanMaximum": 143,
+ "VlanMinimum": 143,
+ "Type": 1,
+ "CreatedBy": "system",
+ "CreationTime": "2018-09-25 14:46:12.374",
+ "UpdatedBy": "root",
+ "UpdatedTime": "2019-06-27 15:06:22.836",
+ "InternalRefNWUUId": "f15a36b6-e3d3-46b2-9e7d-bf9cd66e180d"
+ }]
+ }]}
+ }, {"success": False,
+ "mparams": {"fabric_id": "f1"},
+ "msg": "Unable to retrieve smart fabric uplink information.",
+ "get_uplink_details_from_fabric_id": {}},
+ ]
+ )
+ def test_main_case_success_all(self, params, ome_connection_mock_for_smart_fabric_uplink_info, ome_default_args, ome_response_mock,
+ mocker):
+ mocker.patch(MODULE_PATH + 'get_uplink_details_from_fabric_id',
+ return_value=params.get("get_uplink_details_from_fabric_id"))
+ mocker.patch(MODULE_PATH + 'strip_uplink_info',
+ return_value=params.get("get_uplink_details_from_fabric_id"))
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params.get("get_uplink_details_from_fabric_id")
+ ome_default_args.update(params.get('mparams'))
+ result = self._run_module(ome_default_args)
+ assert result["msg"] == 'Successfully retrieved the fabric uplink information.'
+
+ def test_ome_smart_fabric_main_success_case_fabric_id(self, mocker, ome_default_args, ome_connection_mock_for_smart_fabric_uplink_info,
+ ome_response_mock):
+ ome_default_args.update({"fabric_id": "1"})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = {"value": [{"fabric_id": "1"}]}
+ ome_response_mock.status_code = 200
+ mocker.patch(
+ MODULE_PATH + 'strip_uplink_info',
+ return_value=self.uplink_info)
+ result = self._run_module(ome_default_args)
+ assert 'uplink_info' in result
+ assert result['msg'] == "Successfully retrieved the fabric uplink information."
+
+ @pytest.mark.parametrize("params", [{"success": True,
+ "json_data": {"value": [{
+ "Id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2",
+ "Name": "f1",
+ "Description": "Fabric f1",
+ "OverrideLLDPConfiguration": "Disabled",
+ "ScaleVLANProfile": "Enabled",
+ "Summary": {
+ "NodeCount": 2,
+ "ServerCount": 1,
+ "UplinkCount": 1
+ },
+ "LifeCycleStatus": [{
+ "Activity": "Create",
+ "Status": "2060"
+ }],
+ "FabricDesignMapping": [{
+ "DesignNode": "Switch-A",
+ "PhysicalNode": "SVCTAG1"
+ }, {
+ "DesignNode": "Switch-B",
+ "PhysicalNode": "SVCTAG1"
+ }],
+ "Actions": "null",
+ }]},
+ "fabric_name": "f1",
+ "fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2"
+ }]
+ )
+ def test_ome_smart_fabric_main_success_case_fabric_name(self, params, mocker, ome_default_args, ome_connection_mock_for_smart_fabric_uplink_info,
+ ome_response_mock):
+ ome_default_args.update({"fabric_name": "f1"})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params["json_data"]
+ ome_response_mock.status_code = 200
+ mocker.patch(
+ MODULE_PATH + 'strip_uplink_info',
+ return_value=self.uplink_info)
+ result = self._run_module(ome_default_args)
+ assert 'uplink_info' in result
+ assert result['msg'] == "Successfully retrieved the fabric uplink information."
+
+ @pytest.mark.parametrize("params", [{"inp": {"fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2", "uplink_id": "1ad54420-b145-49a1-9779-21a579ef6f2d"},
+ "success": True,
+ "json_data": {
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "Name": "u1",
+ "Description": "",
+ "MediaType": "Ethernet",
+ "NativeVLAN": 1,
+ "Summary": {
+ "PortCount": 2,
+ "NetworkCount": 1
+ },
+ "UfdEnable": "Disabled",
+ "Ports@odata.count": 2,
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }],
+ "Networks@odata.count": 1,
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null",
+ "VlanMaximum": 143,
+ "VlanMinimum": 143,
+ "Type": 1,
+ "CreatedBy": "system",
+ "CreationTime": "2018-09-25 14:46:12.374",
+ "UpdatedBy": "root",
+ "UpdatedTime": "2019-06-27 15:06:22.836",
+ "InternalRefNWUUId": "f15a36b6-e3d3-46b2-9e7d-bf9cd66e180d"
+ }]},
+ "uplink_id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2"
+ }]
+ )
+ def test_ome_smart_fabric_main_failure_case_uplink_id(self, params, mocker, ome_default_args, ome_connection_mock_for_smart_fabric_uplink_info,
+ ome_response_mock):
+ ome_default_args.update({"uplink_id": "u1"})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params["json_data"]
+ ome_response_mock.status_code = 200
+ mocker.patch(
+ MODULE_PATH + 'strip_uplink_info',
+ return_value=self.uplink_info)
+ result = self._run_module(ome_default_args)
+ assert result['msg'] == "fabric_id or fabric_name is required along with uplink_id."
+
+ @pytest.mark.parametrize("params", [{"inp": {"fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2", "uplink_id": "1ad54420-b145-49a1-9779-21a579ef6f2d"},
+ "success": True,
+ "json_data": {
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "Name": "u1",
+ "Description": "",
+ "MediaType": "Ethernet",
+ "NativeVLAN": 1,
+ "Summary": {
+ "PortCount": 2,
+ "NetworkCount": 1
+ },
+ "UfdEnable": "Disabled",
+ "Ports@odata.count": 2,
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }],
+ "Networks@odata.count": 1,
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null",
+ "VlanMaximum": 143,
+ "VlanMinimum": 143,
+ "Type": 1,
+ "CreatedBy": "system",
+ "CreationTime": "2018-09-25 14:46:12.374",
+ "UpdatedBy": "root",
+ "UpdatedTime": "2019-06-27 15:06:22.836",
+ "InternalRefNWUUId": "f15a36b6-e3d3-46b2-9e7d-bf9cd66e180d"
+ }]},
+ "uplink_id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2"
+ }]
+ )
+ def test_ome_smart_fabric_main_success_case_uplink_id(self, params, mocker, ome_default_args, ome_connection_mock_for_smart_fabric_uplink_info,
+ ome_response_mock):
+ ome_default_args.update({"fabric_id": "f1", "uplink_id": "u1"})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params["json_data"]
+ ome_response_mock.status_code = 200
+ mocker.patch(
+ MODULE_PATH + 'strip_uplink_info',
+ return_value=self.uplink_info)
+ result = self._run_module(ome_default_args)
+ assert 'uplink_info' in result
+ assert result['msg'] == "Successfully retrieved the fabric uplink information."
+
+ @pytest.mark.parametrize("params", [{"inp": {"fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2", "uplink_id": "1ad54420-b145-49a1-9779-21a579ef6f2d"},
+ "success": True,
+ "json_data": {
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "Name": "u1",
+ "Description": "",
+ "MediaType": "Ethernet",
+ "NativeVLAN": 1,
+ "Summary": {
+ "PortCount": 2,
+ "NetworkCount": 1
+ },
+ "UfdEnable": "Disabled",
+ "Ports@odata.count": 2,
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }],
+ "Networks@odata.count": 1,
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null",
+ "VlanMaximum": 143,
+ "VlanMinimum": 143,
+ "Type": 1,
+ "CreatedBy": "system",
+ "CreationTime": "2018-09-25 14:46:12.374",
+ "UpdatedBy": "root",
+ "UpdatedTime": "2019-06-27 15:06:22.836",
+ "InternalRefNWUUId": "f15a36b6-e3d3-46b2-9e7d-bf9cd66e180d"
+ }]},
+ "uplink_id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2"
+ }]
+ )
+ def test_ome_smart_fabric_main_failure_case_uplink_name(self, params, mocker, ome_default_args, ome_connection_mock_for_smart_fabric_uplink_info,
+ ome_response_mock):
+ ome_default_args.update({"uplink_name": "u1"})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params["json_data"]
+ ome_response_mock.status_code = 200
+ mocker.patch(
+ MODULE_PATH + 'strip_uplink_info',
+ return_value=self.uplink_info)
+ result = self._run_module(ome_default_args)
+ assert result['msg'] == "fabric_id or fabric_name is required along with uplink_name."
+
+ @pytest.mark.parametrize("params", [{"success": True,
+ "json_data": {"value": [{
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "Name": "u1",
+ "Description": "",
+ "MediaType": "Ethernet",
+ "NativeVLAN": 1,
+ "Summary": {"PortCount": 2,
+ "NetworkCount": 1
+ },
+ "UfdEnable": "Disabled",
+ "Ports@odata.count": 2,
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }],
+ "Networks@odata.count": 1,
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null",
+ "VlanMaximum": 143,
+ "VlanMinimum": 143,
+ "Type": 1,
+ "CreatedBy": "system",
+ "CreationTime": "2018-09-25 14:46:12.374",
+ "UpdatedBy": "root",
+ "UpdatedTime": "2019-06-27 15:06:22.836",
+ "InternalRefNWUUId": "f15a36b6-e3d3-46b2-9e7d-bf9cd66e180d"
+ }]
+ }]
+ },
+ "fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2",
+ "uplink_id": "1ad54420-b145-49a1-9779-21a579ef6f2d"}])
+ def test_ome_smart_fabric_main_success_case_uplink_name(self, params, mocker, ome_default_args, ome_connection_mock_for_smart_fabric_uplink_info,
+ ome_response_mock):
+ ome_default_args.update({"fabric_id": "f1", "uplink_name": "u1"})
+ ome_response_mock.success = True
+ ome_response_mock.json_data = params.get("json_data")
+ ome_response_mock.status_code = 200
+ mocker.patch(
+ MODULE_PATH + 'strip_uplink_info',
+ return_value=self.uplink_info)
+ result = self._run_module(ome_default_args)
+ assert 'uplink_info' in result
+ assert result['msg'] == "Successfully retrieved the fabric uplink information."
+
+ @pytest.mark.parametrize("params", [{"success": True,
+ "json_data": {"value": [{
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "Name": "u1",
+ "Description": "",
+ "MediaType": "Ethernet",
+ "NativeVLAN": 1,
+ "Summary": {"PortCount": 2,
+ "NetworkCount": 1
+ },
+ "UfdEnable": "Disabled",
+ "Ports@odata.count": 2,
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": ""
+ }],
+ "Networks@odata.count": 1,
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null"
+ }]
+ }],
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null"
+ }],
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": ""
+ }]
+ },
+ 'message': "Successfully retrieved the fabric uplink information.",
+ 'mparams': {"fabric_id": "f1",
+ "uplink_id": "u1"}
+ }, {"success": True,
+ "json_data": {"value": [{
+ "Uplinks@odata.navigationLink": "/odata/UpLink/1ad54420/b145/49a1/9779/21a579ef6f2d",
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "Name": "u1",
+ "Description": "",
+ "MediaType": "Ethernet",
+ "NativeVLAN": 1,
+ "Summary": {"PortCount": 2,
+ "NetworkCount": 1
+ },
+ "UfdEnable": "Disabled",
+ "Ports@odata.count": 2,
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": ""
+ }],
+ "Networks@odata.count": 1,
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null"
+ }]
+ }],
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null"
+ }],
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": ""
+ }]
+ },
+ 'message': "Successfully retrieved the fabric uplink information.",
+ 'mparams': {}
+ }])
+ def test_ome_smart_fabric_exit_json(self, params, ome_default_args, ome_connection_mock_for_smart_fabric_uplink_info,
+ ome_response_mock):
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(
+ ome_default_args, check_mode=params.get('check_mode', False))
+ assert 'uplink_info' in result
+ assert result['msg'] == params['message']
+
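+ # get_all_uplink_details is expected to come back empty for this mocked fabric listing.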
+ @pytest.mark.parametrize("params", [{"success": True,
+ "json_data": {"value": [{
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "Name": "u1",
+ "Description": "",
+ "MediaType": "Ethernet",
+ "NativeVLAN": 1,
+ "Summary": {"PortCount": 2,
+ "NetworkCount": 1
+ },
+ "UfdEnable": "Disabled",
+ "Ports@odata.count": 2,
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }],
+ "Networks@odata.count": 1,
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null",
+ "VlanMaximum": 143,
+ "VlanMinimum": 143,
+ "Type": 1,
+ "CreatedBy": "system",
+ "CreationTime": "2018-09-25 14:46:12.374",
+ "UpdatedBy": "root",
+ "UpdatedTime": "2019-06-27 15:06:22.836",
+ "InternalRefNWUUId": "f15a36b6-e3d3-46b2-9e7d-bf9cd66e180d"
+ }]
+ }]
+ },
+ "fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2",
+ "uplink_id": "1ad54420-b145-49a1-9779-21a579ef6f2d"}])
+ def test_get_all_uplink_details(self, params, ome_connection_mock_for_smart_fabric_uplink_info, ome_response_mock):
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock()
+ resp = self.module.get_all_uplink_details(
+ f_module, ome_connection_mock_for_smart_fabric_uplink_info)
+ assert resp == []
+
+ @pytest.mark.parametrize("params", [{"success": True,
+ "inp": {"fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2",
+ "uplink_name": "1ad54420-b145-49a1-9779-21a579ef6f2d"},
+ "json_data": {"value": [{
+ "Id": "1ad54420-b145-49a1-9779-21a579ef6f2d",
+ "Name": "u1",
+ "Description": "",
+ "MediaType": "Ethernet",
+ "NativeVLAN": 1,
+ "Summary": {"PortCount": 2,
+ "NetworkCount": 1
+ },
+ "UfdEnable": "Disabled",
+ "Ports@odata.count": 2,
+ "Ports": [{
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }, {
+ "Id": "SVCTAG1:ethernet1/1/35",
+ "Name": "",
+ "Description": "",
+ "Type": "PhysicalEthernet",
+ "MediaType": "Ethernet",
+ "NodeServiceTag": "SVCTAG1",
+ "PortNumber": "ethernet1/1/35",
+ "Status": "Down",
+ "AdminStatus": "Enabled",
+ "CurrentSpeed": "0",
+ "MaxSpeed": "0",
+ "ConfiguredSpeed": "0",
+ "OpticsType": "NotPresent",
+ "BlinkStatus": "OFF",
+ "Role": "Uplink"
+ }],
+ "Networks@odata.count": 1,
+ "Networks": [{
+ "Id": 10155,
+ "Name": "testvlan",
+ "Description": "null",
+ "VlanMaximum": 143,
+ "VlanMinimum": 143,
+ "Type": 1,
+ "CreatedBy": "system",
+ "CreationTime": "2018-09-25 14:46:12.374",
+ "UpdatedBy": "root",
+ "UpdatedTime": "2019-06-27 15:06:22.836",
+ "InternalRefNWUUId": "f15a36b6-e3d3-46b2-9e7d-bf9cd66e180d"
+ }]
+ }]
+ },
+ "fabric_id": "61c20a59-9ed5-4ae5-b850-5e5acf42d2f2",
+ "uplink_name": "1ad54420-b145-49a1-9779-21a579ef6f2d"}])
+ def test_get_uplink_name_failure_case(self, params, mocker, ome_connection_mock_for_smart_fabric_uplink_info, ome_response_mock, ome_default_args):
+ ome_default_args.update(params.get("inp"))
+ ome_response_mock.success = params["success"]
+ ome_response_mock.json_data = params["json_data"]
+ f_module = self.get_module_mock(params=params.get("inp"))
+ mocker.patch(
+ MODULE_PATH + 'get_uplink_id_from_name',
+ return_value="")
+ uplink_id = self.module.get_uplink_id_from_name(f_module, ome_connection_mock_for_smart_fabric_uplink_info,
+ params.get("uplink_name"), params.get("fabric_id"))
+ assert uplink_id == ""
+
+ @pytest.mark.parametrize("params", [{"uplink_name": "f1", "fabric_id": "u1"}])
+ def test_get_uplink_id_from_name_HTTPError_error_case(self, params, ome_default_args, mocker,
+ ome_connection_mock):
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ error_msg = "Unable to retrieve smart fabric uplink information."
+ ome_connection_mock.invoke_request.side_effect = HTTPError('https://testdell.com', 404,
+ error_msg,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module.get_uplink_id_from_name(f_module, ome_connection_mock, params.get("uplink_name"),
+ params.get('fabric_id'))
+ assert exc.value.args[0] == error_msg
+
+ @pytest.mark.parametrize("params", [{"fabric_name": "f1"}])
+ def test_get_all_uplink_details_HTTPError_error_case(self, params, ome_default_args, mocker,
+ ome_connection_mock):
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ error_msg = "Unable to retrieve smart fabric uplink information."
+ ome_connection_mock.invoke_request.side_effect = HTTPError('https://testdell.com', 404,
+ error_msg,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module.get_all_uplink_details(f_module, ome_connection_mock)
+ assert exc.value.args[0] == error_msg
+
+ @pytest.mark.parametrize("params", [{"fabric_name": "f1"}])
+ def test_get_fabric_id_from_name_HTTPError_error_case(self, params, ome_default_args, mocker,
+ ome_connection_mock):
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ error_msg = "Unable to retrieve smart fabric uplink information."
+ ome_connection_mock.invoke_request.side_effect = HTTPError('https://testdell.com', 404,
+ error_msg,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module.get_fabric_id_from_name(
+ f_module, ome_connection_mock, params.get('fabric_name'))
+ assert exc.value.args[0] == error_msg
+
+ @pytest.mark.parametrize("params", [{"fabric_id": "f1", "uplink_id": "u1"}])
+ def test_get_uplink_details_HTTPError_error_case(self, params, ome_default_args, mocker,
+ ome_connection_mock):
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ error_msg = "Unable to retrieve smart fabric uplink information with uplink ID {0}.".format(
+ params.get('uplink_id'))
+ ome_connection_mock.invoke_request.side_effect = HTTPError('https://testdell.com', 404,
+ error_msg,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module.get_uplink_details(f_module, ome_connection_mock, params.get(
+ 'fabric_id'), params.get('uplink_id'))
+ assert exc.value.args[0] == error_msg
+
+ @pytest.mark.parametrize("params", [{"fabric_id": "f1"}])
+ def test_get_uplink_details_from_fabric_id_HTTPError_error_case(self, params, ome_default_args, mocker,
+ ome_connection_mock):
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ error_msg = "Unable to retrieve smart fabric uplink information with fabric ID {0}.".format(
+ params.get('fabric_id'))
+ ome_connection_mock.invoke_request.side_effect = HTTPError('https://testdell.com', 404,
+ error_msg,
+ {"accept-type": "application/json"},
+ StringIO(json_str))
+ f_module = self.get_module_mock()
+ with pytest.raises(Exception) as exc:
+ self.module.get_uplink_details_from_fabric_id(
+ f_module, ome_connection_mock, params.get('fabric_id'))
+ assert exc.value.args[0] == error_msg
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_smart_fabric_uplink_info_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_smart_fabric_uplink_info,
+ ome_response_mock):
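+        # URLError should be reported as 'unreachable'; every other exception type should fail the module.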
+ ome_default_args.update({"fabric_id": "f1"})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'get_uplink_details_from_fabric_id',
+ side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'get_uplink_details_from_fabric_id',
+ side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'get_uplink_details_from_fabric_id',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template.py
index 27c84ffab..35b6f7b44 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.2.0
-# Copyright (C) 2019-2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -31,7 +31,8 @@ def ome_connection_mock_for_template(mocker, ome_response_mock):
connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
- ome_connection_mock_obj.get_all_report_details.return_value = {"report_list": []}
+ ome_connection_mock_obj.get_all_report_details.return_value = {
+ "report_list": []}
return ome_connection_mock_obj
@@ -51,8 +52,10 @@ class TestOmeTemplate(FakeAnsibleModule):
ome_connection_mock_for_template.get_all_report_details.return_value = {
"report_list": [{"Id": Constants.device_id1,
"DeviceServiceTag": Constants.service_tag1}]}
- f_module = self.get_module_mock({'device_id': [], 'device_service_tag': [Constants.service_tag1]})
- data = self.module.get_device_ids(f_module, ome_connection_mock_for_template)
+ f_module = self.get_module_mock(
+ {'device_id': [], 'device_service_tag': [Constants.service_tag1]})
+ data = self.module.get_device_ids(
+ f_module, ome_connection_mock_for_template)
assert data == [Constants.device_id1]
def test_get_device_ids_failure_case01(self, ome_connection_mock_for_template, ome_response_mock, ome_default_args):
@@ -60,7 +63,8 @@ class TestOmeTemplate(FakeAnsibleModule):
ome_response_mock.success = False
f_module = self.get_module_mock(params={'device_id': ["#@!1"]})
with pytest.raises(Exception) as exc:
- self.module.get_device_ids(f_module, ome_connection_mock_for_template)
+ self.module.get_device_ids(
+ f_module, ome_connection_mock_for_template)
assert exc.value.args[0] == "Unable to complete the operation because the entered target device id(s) " \
"'{0}' are invalid.".format("#@!1")
@@ -205,9 +209,11 @@ class TestOmeTemplate(FakeAnsibleModule):
{"Id": Constants.device_id2,
"DeviceServiceTag": "tag2"}
]}
- f_module = self.get_module_mock(params={'device_id': [Constants.device_id2], 'device_service_tag': ["abcd"]})
+ f_module = self.get_module_mock(
+ params={'device_id': [Constants.device_id2], 'device_service_tag': ["abcd"]})
with pytest.raises(Exception) as exc:
- self.module.get_device_ids(f_module, ome_connection_mock_for_template)
+ self.module.get_device_ids(
+ f_module, ome_connection_mock_for_template)
assert exc.value.args[0] == "Unable to complete the operation because the entered target service tag(s) " \
"'{0}' are invalid.".format('abcd')
@@ -217,9 +223,11 @@ class TestOmeTemplate(FakeAnsibleModule):
"report_list": [{"Id": Constants.device_id1,
"DeviceServiceTag": Constants.service_tag1}
], "resp_obj": ome_response_mock}
- f_module = self.get_module_mock(params={'device_service_tag': [Constants.service_tag1], 'device_id': []})
+ f_module = self.get_module_mock(
+ params={'device_service_tag': [Constants.service_tag1], 'device_id': []})
with pytest.raises(Exception) as exc:
- device_ids = self.module.get_device_ids(f_module, ome_connection_mock_for_template)
+ device_ids = self.module.get_device_ids(
+ f_module, ome_connection_mock_for_template)
assert exc.value.args[0] == "Failed to fetch the device ids."
def test_get_view_id_success_case(self, ome_connection_mock_for_template, ome_response_mock):
@@ -237,7 +245,8 @@ class TestOmeTemplate(FakeAnsibleModule):
"SourceDeviceId": 2224}])
def test_get_create_payload(self, param, ome_response_mock, ome_connection_mock_for_template):
f_module = self.get_module_mock(params=param)
- data = self.module.get_create_payload(f_module, ome_connection_mock_for_template, 2224, 4)
+ data = self.module.get_create_payload(
+ f_module, ome_connection_mock_for_template, 2224, 4)
assert data['Fqdds'] == "All"
def test_get_template_by_id_success_case(self, ome_response_mock):
@@ -249,7 +258,8 @@ class TestOmeTemplate(FakeAnsibleModule):
assert data
def test_get_template_by_name_success_case(self, ome_response_mock, ome_connection_mock_for_template):
- ome_response_mock.json_data = {'value': [{"Name": "test Sample Template import1", "Id": 24}]}
+ ome_response_mock.json_data = {
+ 'value': [{"Name": "test Sample Template import1", "Id": 24}]}
ome_response_mock.status_code = 200
ome_response_mock.success = True
f_module = self.get_module_mock()
@@ -259,20 +269,24 @@ class TestOmeTemplate(FakeAnsibleModule):
assert data["Id"] == 24
def test_get_group_devices_all(self, ome_response_mock, ome_connection_mock_for_template):
- ome_response_mock.json_data = {'value': [{"Name": "Device1", "Id": 24}]}
+ ome_response_mock.json_data = {
+ 'value': [{"Name": "Device1", "Id": 24}]}
ome_response_mock.status_code = 200
ome_response_mock.success = True
f_module = self.get_module_mock()
- data = self.module.get_group_devices_all(ome_connection_mock_for_template, "uri")
+ data = self.module.get_group_devices_all(
+ ome_connection_mock_for_template, "uri")
assert data == [{"Name": "Device1", "Id": 24}]
def _test_get_template_by_name_fail_case(self, ome_response_mock):
- ome_response_mock.json_data = {'value': [{"Name": "template by name for template name", "Id": 12}]}
+ ome_response_mock.json_data = {
+ 'value': [{"Name": "template by name for template name", "Id": 12}]}
ome_response_mock.status_code = 500
ome_response_mock.success = False
f_module = self.get_module_mock()
with pytest.raises(Exception) as exc:
- self.module.get_template_by_name("template by name for template name", f_module, ome_response_mock)
+ self.module.get_template_by_name(
+ "template by name for template name", f_module, ome_response_mock)
assert exc.value.args[0] == "Unable to complete the operation because the" \
" requested template with name {0} is not present." \
.format("template by name for template name")
@@ -305,7 +319,8 @@ class TestOmeTemplate(FakeAnsibleModule):
return_value=["Deployment"])
mocker.patch(MODULE_PATH + 'get_create_payload',
return_value=params["mid"])
- data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ data = self.module._get_resource_parameters(
+ f_module, ome_connection_mock_for_template)
assert data == params["out"]
modify_payload = {"command": "modify", "device_id": [25007], "template_id": 1234,
@@ -334,68 +349,90 @@ class TestOmeTemplate(FakeAnsibleModule):
return_value={})
mocker.patch(MODULE_PATH + 'get_modify_payload',
return_value={})
- mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
- data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ mocker.patch(MODULE_PATH + 'get_template_details',
+ return_value={"Id": 1234, "Name": "templ1"})
+ data = self.module._get_resource_parameters(
+ f_module, ome_connection_mock_for_template)
assert data == ('TemplateService/Templates(1234)', {}, 'PUT')
def test__get_resource_parameters_delete_success_case(self, mocker, ome_response_mock,
ome_connection_mock_for_template):
- f_module = self.get_module_mock({"command": "delete", "template_id": 1234})
- mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
- data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ f_module = self.get_module_mock(
+ {"command": "delete", "template_id": 1234})
+ mocker.patch(MODULE_PATH + 'get_template_details',
+ return_value={"Id": 1234, "Name": "templ1"})
+ data = self.module._get_resource_parameters(
+ f_module, ome_connection_mock_for_template)
assert data == ('TemplateService/Templates(1234)', {}, 'DELETE')
def test__get_resource_parameters_export_success_case(self, mocker, ome_response_mock,
ome_connection_mock_for_template):
- f_module = self.get_module_mock({"command": "export", "template_id": 1234})
- mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
- data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
- assert data == ('TemplateService/Actions/TemplateService.Export', {'TemplateId': 1234}, 'POST')
+ f_module = self.get_module_mock(
+ {"command": "export", "template_id": 1234})
+ mocker.patch(MODULE_PATH + 'get_template_details',
+ return_value={"Id": 1234, "Name": "templ1"})
+ data = self.module._get_resource_parameters(
+ f_module, ome_connection_mock_for_template)
+ assert data == (
+ 'TemplateService/Actions/TemplateService.Export', {'TemplateId': 1234}, 'POST')
def test__get_resource_parameters_deploy_success_case(self, mocker, ome_response_mock,
ome_connection_mock_for_template):
- f_module = self.get_module_mock({"command": "deploy", "template_id": 1234})
+ f_module = self.get_module_mock(
+ {"command": "deploy", "template_id": 1234})
mocker.patch(MODULE_PATH + 'get_device_ids',
return_value=[Constants.device_id1])
mocker.patch(MODULE_PATH + 'get_deploy_payload',
return_value={"deploy_payload": "value"})
- mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
- data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
- assert data == ('TemplateService/Actions/TemplateService.Deploy', {"deploy_payload": "value"}, 'POST')
+ mocker.patch(MODULE_PATH + 'get_template_details',
+ return_value={"Id": 1234, "Name": "templ1"})
+ data = self.module._get_resource_parameters(
+ f_module, ome_connection_mock_for_template)
+ assert data == ('TemplateService/Actions/TemplateService.Deploy',
+ {"deploy_payload": "value"}, 'POST')
def test__get_resource_parameters_clone_success_case(self, mocker, ome_response_mock,
ome_connection_mock_for_template):
- f_module = self.get_module_mock({"command": "clone", "template_id": 1234, "template_view_type": 2})
+ f_module = self.get_module_mock(
+ {"command": "clone", "template_id": 1234, "template_view_type": 2})
mocker.patch(MODULE_PATH + 'get_view_id',
return_value=2)
mocker.patch(MODULE_PATH + 'get_clone_payload',
return_value={"clone_payload": "value"})
- mocker.patch(MODULE_PATH + 'get_template_details', return_value={"Id": 1234, "Name": "templ1"})
- data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
- assert data == ('TemplateService/Actions/TemplateService.Clone', {"clone_payload": "value"}, 'POST')
+ mocker.patch(MODULE_PATH + 'get_template_details',
+ return_value={"Id": 1234, "Name": "templ1"})
+ data = self.module._get_resource_parameters(
+ f_module, ome_connection_mock_for_template)
+ assert data == ('TemplateService/Actions/TemplateService.Clone',
+ {"clone_payload": "value"}, 'POST')
def test__get_resource_parameters_import_success_case(self, mocker, ome_response_mock,
ome_connection_mock_for_template):
- f_module = self.get_module_mock({"command": "import", "template_id": 1234, "template_view_type": 2})
+ f_module = self.get_module_mock(
+ {"command": "import", "template_id": 1234, "template_view_type": 2})
mocker.patch(MODULE_PATH + 'get_view_id',
return_value=2)
mocker.patch(MODULE_PATH + 'get_import_payload',
return_value={"import_payload": "value"})
- data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
- assert data == ('TemplateService/Actions/TemplateService.Import', {"import_payload": "value"}, 'POST')
+ data = self.module._get_resource_parameters(
+ f_module, ome_connection_mock_for_template)
+ assert data == ('TemplateService/Actions/TemplateService.Import',
+ {"import_payload": "value"}, 'POST')
@pytest.mark.parametrize("params", [{"inp": {"command": "modify"}, "mid": inter_payload, "out": payload_out}])
def test__get_resource_parameters_modify_template_none_failure_case(self, mocker, ome_response_mock,
ome_connection_mock_for_template, params):
f_module = self.get_module_mock(params=params["inp"])
with pytest.raises(Exception) as exc:
- data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ data = self.module._get_resource_parameters(
+ f_module, ome_connection_mock_for_template)
assert exc.value.args[0] == "Enter a valid template_name or template_id"
@pytest.mark.parametrize("params",
[{"success": True, "json_data": {"value": [{"Name": "template_name", "Id": 123}]},
"id": 123, "gtype": True},
- {"success": True, "json_data": {}, "id": 0, "gtype": False},
+ {"success": True, "json_data": {},
+ "id": 0, "gtype": False},
{"success": False, "json_data": {"value": [{"Name": "template_name", "Id": 123}]},
"id": 0, "gtype": False},
{"success": True, "json_data": {"value": [{"Name": "template_name1", "Id": 123}]},
@@ -404,13 +441,15 @@ class TestOmeTemplate(FakeAnsibleModule):
ome_response_mock):
ome_response_mock.success = params["success"]
ome_response_mock.json_data = params["json_data"]
- id = self.module.get_type_id_valid(ome_connection_mock_for_template, params["id"])
+ id = self.module.get_type_id_valid(
+ ome_connection_mock_for_template, params["id"])
assert id == params["gtype"]
@pytest.mark.parametrize("params",
[{"success": True, "json_data": {"value": [{"Description": "Deployment", "Id": 2}]},
"view": "Deployment", "gtype": 2},
- {"success": True, "json_data": {}, "view": "Compliance", "gtype": 1},
+ {"success": True, "json_data": {},
+ "view": "Compliance", "gtype": 1},
{"success": False, "json_data": {"value": [{"Description": "template_name", "Id": 1}]},
"view": "Deployment", "gtype": 2},
{"success": True, "json_data": {"value": [{"Description": "template_name1", "Id": 2}]},
@@ -419,12 +458,14 @@ class TestOmeTemplate(FakeAnsibleModule):
ome_response_mock):
ome_response_mock.success = params["success"]
ome_response_mock.json_data = params["json_data"]
- id = self.module.get_view_id(ome_connection_mock_for_template, params["view"])
+ id = self.module.get_view_id(
+ ome_connection_mock_for_template, params["view"])
assert id == params["gtype"]
@pytest.mark.parametrize("param",
[{"pin": {"NetworkBootIsoModel": {"ShareDetail": {"Password": "share_password"}}}},
- {"pin": {"NetworkBootIsoModel": {"ShareDetail": {"Password1": "share_password"}}}},
+ {"pin": {"NetworkBootIsoModel": {
+ "ShareDetail": {"Password1": "share_password"}}}},
{"pin": {"NetworkBootIsoModel": {"ShareDetail": [{"Password1": "share_password"}]}}}])
def test_password_no_log(self, param):
attributes = param["pin"]
@@ -432,13 +473,15 @@ class TestOmeTemplate(FakeAnsibleModule):
def test__get_resource_parameters_create_failure_case_02(self, mocker, ome_response_mock,
ome_connection_mock_for_template):
- f_module = self.get_module_mock({"command": "create", "template_name": "name"})
+ f_module = self.get_module_mock(
+ {"command": "create", "template_name": "name"})
mocker.patch(MODULE_PATH + 'get_device_ids',
return_value=[Constants.device_id1, Constants.device_id2])
mocker.patch(MODULE_PATH + 'get_template_by_name',
return_value=("template", 1234))
with pytest.raises(Exception) as exc:
- data = self.module._get_resource_parameters(f_module, ome_connection_mock_for_template)
+ data = self.module._get_resource_parameters(
+ f_module, ome_connection_mock_for_template)
assert exc.value.args[0] == "Create template requires only one reference device"
def test_main_template_success_case2(self, ome_default_args, mocker, module_mock, ome_connection_mock_for_template,
@@ -453,17 +496,22 @@ class TestOmeTemplate(FakeAnsibleModule):
ome_response_mock.success = True
mocker.patch(MODULE_PATH + '_get_resource_parameters',
return_value=(TEMPLATE_RESOURCE, "template_payload", "POST"))
+ mocker.patch(MODULE_PATH + 'time.sleep', return_value=None)
result = self._run_module(ome_default_args)
assert result['changed'] is True
- assert result['msg'] == "Successfully created a template with ID {0}".format(ome_response_mock.json_data)
+ assert result['msg'] == "Successfully created a template with ID {0}".format(
+ ome_response_mock.json_data)
def test_get_import_payload_success_case_01(self, ome_connection_mock_for_template):
- f_module = self.get_module_mock(params={"attributes": {"Name": "template1", "Content": "Content"}})
- self.module.get_import_payload(f_module, ome_connection_mock_for_template, 2)
+ f_module = self.get_module_mock(
+ params={"attributes": {"Name": "template1", "Content": "Content"}})
+ self.module.get_import_payload(
+ f_module, ome_connection_mock_for_template, 2)
def test_get_deploy_payload_success_case_01(self):
module_params = {"attributes": {"Name": "template1"}}
- self.module.get_deploy_payload(module_params, [Constants.device_id1], 1234)
+ self.module.get_deploy_payload(
+ module_params, [Constants.device_id1], 1234)
@pytest.mark.parametrize("param",
[{"mparams": {"attributes": {"Name": "template1"}}, "name": "template0",
@@ -473,7 +521,8 @@ class TestOmeTemplate(FakeAnsibleModule):
def test_get_clone_payload_success_case_01(self, param, ome_connection_mock_for_template):
f_module = self.get_module_mock(param["mparams"])
module_params = param["mparams"]
- payload = self.module.get_clone_payload(f_module, ome_connection_mock_for_template, param['template_id'], 2)
+ payload = self.module.get_clone_payload(
+ f_module, ome_connection_mock_for_template, param['template_id'], 2)
assert payload == param['clone_payload']
@pytest.mark.parametrize("param",
@@ -511,8 +560,10 @@ class TestOmeTemplate(FakeAnsibleModule):
ome_response_mock.json_data = {
"value": [{'Id': 1, "Name": "mygroup3"}, {'Id': 2, "Name": "mygroup2"}, {'Id': 3, "Name": "mygroup"}]}
ome_response_mock.status_code = 200
- mocker.patch(MODULE_PATH + 'get_group_devices_all', return_value=[{'Id': 1}, {'Id': 2}, {'Id': 3}])
- dev_list = self.module.get_group_details(ome_connection_mock_for_template, f_module)
+ mocker.patch(MODULE_PATH + 'get_group_devices_all',
+ return_value=[{'Id': 1}, {'Id': 2}, {'Id': 3}])
+ dev_list = self.module.get_group_details(
+ ome_connection_mock_for_template, f_module)
assert dev_list == param["dev_list"]
@pytest.mark.parametrize("param", [
@@ -526,8 +577,10 @@ class TestOmeTemplate(FakeAnsibleModule):
ome_response_mock.json_data = {
"value": [{'Id': 1, "Name": "mygroup3"}, {'Id': 2, "Name": "mygroup2"}, {'Id': 3, "Name": "mygroup"}]}
ome_response_mock.status_code = 200
- mocker.patch(MODULE_PATH + 'get_group_devices_all', return_value=[{'Id': 1}, {'Id': 2}, {'Id': 3}])
- dev_list = self.module.get_group_details(ome_connection_mock_for_template, f_module)
+ mocker.patch(MODULE_PATH + 'get_group_devices_all',
+ return_value=[{'Id': 1}, {'Id': 2}, {'Id': 3}])
+ dev_list = self.module.get_group_details(
+ ome_connection_mock_for_template, f_module)
assert dev_list == param["dev_list"]
@pytest.mark.parametrize("params", [
@@ -567,35 +620,150 @@ class TestOmeTemplate(FakeAnsibleModule):
ome_response_mock):
ome_response_mock.success = params.get("success", True)
ome_response_mock.json_data = params["json_data"]
- mocker.patch(MODULE_PATH + 'get_template_by_name', return_value=params.get('get_template_by_name'))
- mocker.patch(MODULE_PATH + 'attributes_check', return_value=params.get('attributes_check', 0))
- f_module = self.get_module_mock(params=params["mparams"], check_mode=params.get('check_mode', False))
+ mocker.patch(MODULE_PATH + 'get_template_by_name',
+ return_value=params.get('get_template_by_name'))
+ mocker.patch(MODULE_PATH + 'attributes_check',
+ return_value=params.get('attributes_check', 0))
+ f_module = self.get_module_mock(
+ params=params["mparams"], check_mode=params.get('check_mode', False))
error_message = params["res"]
with pytest.raises(Exception) as err:
- self.module.get_modify_payload(f_module, ome_connection_mock_for_template, params.get('template'))
+ self.module.get_modify_payload(
+ f_module, ome_connection_mock_for_template, params.get('template'))
assert err.value.args[0] == error_message
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [
+ {'Id': 123, 'TargetId': 123, 'ProfileState': 1,
+ 'DeviceId': 1234, "Type": 1000},
+ {'Id': 234, 'TargetId': 235, 'ProfileState': 1, 'DeviceId': 1235, "Type": 1000}],
+ "report_list": [{'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceId': 1234, "Type": 1000}]},
+ "job_tracking": (True, "msg", {'LastRunStatus': {"Name": "Running"}}, True),
+ 'message': "Template operation is in progress. Task excited after 'job_wait_timeout'.",
+ 'mparams': {"command": "deploy", "template_id": 123, "device_id": 1234}
+ },
+ {"json_data": {"value": [
+ {'Id': 123, 'TargetId': 123, 'ProfileState': 1,
+ 'DeviceId': 1234, "Type": 1000},
+ {'Id': 234, 'TargetId': 235, 'ProfileState': 1, 'DeviceId': 1235, "Type": 1000}],
+ "report_list": [{'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceId': 1234, "Type": 1000}]},
+ "job_tracking": (True, "msg", {'LastRunStatus': {"Name": "Running"}}, True),
+ 'message': "Changes found to be applied.",
+ 'mparams': {"command": "deploy", "template_id": 123, "device_id": 1234},
+ "check_mode": True
+ },
+ {"json_data": {"value": [
+ {'Id': 123, 'TargetId': 123, 'ProfileState': 1,
+ 'TemplateId': 1, 'DeviceId': 1234, "Type": 1000},
+ {'Id': 234, 'TargetId': 235, 'ProfileState': 1, 'TemplateId': 12, 'DeviceId': 1235, "Type": 1000}],
+ "report_list": [{'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceId': 1234, "Type": 1000}]},
+ "job_tracking": (True, "msg", {'LastRunStatus': {"Name": "Running"}}, True),
+ "get_device_ids": [123, 1234],
+ 'message': "The device(s) '123' have been assigned the template(s) '1' respectively. Please unassign the profiles from the devices.",
+ 'mparams': {"command": "deploy", "template_id": 123, "device_id": 1234}
+ },
+ {"json_data": {"value": [
+ {'Id': 123, 'TargetId': 123, 'ProfileState': 1,
+ 'TemplateId': 123, 'DeviceId': 1234, "Type": 1000},
+ {'Id': 234, 'TargetId': 235, 'ProfileState': 1, 'TemplateId': 12, 'DeviceId': 1235, "Type": 1000}],
+ "report_list": [{'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceId': 1234, "Type": 1000}]},
+ "job_tracking": (True, "msg", {'LastRunStatus': {"Name": "Running"}}, True),
+ "get_device_ids": [123],
+ 'message': "No changes found to be applied.",
+ 'mparams': {"command": "deploy", "template_id": 123, "device_id": 1234}
+ },
+ {"json_data": {"value": [
+ {'Id': 123, 'TargetId': 123, 'ProfileState': 1,
+ 'TemplateId': 123, 'DeviceId': 1234, "Type": 1000},
+ {'Id': 234, 'TargetId': 235, 'ProfileState': 1, 'TemplateId': 12, 'DeviceId': 1235, "Type": 1000}],
+ "report_list": [{'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceId': 1234, "Type": 1000}]},
+ "job_tracking": (True, "msg", {'LastRunStatus': {"Name": "Running"}}, True),
+ "get_device_ids": [123],
+ 'message': "No changes found to be applied.",
+ 'mparams': {"command": "delete", "template_id": 12, "device_id": 1234}
+ }
+ ])
+ def test_ome_template_success(self, params, ome_connection_mock_for_template, ome_response_mock,
+ ome_default_args, module_mock, mocker):
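+        # Run the module end to end with mocked REST responses; each parametrized case asserts the expected result message.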
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_connection_mock_for_template.get_all_report_details.return_value = params[
+ 'json_data']
+ ome_default_args.update(params['mparams'])
+ mocks = ["job_tracking", "get_device_ids"]
+ for m in mocks:
+ if m in params:
+ mocker.patch(MODULE_PATH + m, return_value=params.get(m, {}))
+ result = self._run_module(
+ ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [
+ {'Id': 123, 'TargetId': 123, 'ProfileState': 1,
+ 'DeviceId': 1234, "Type": 1000},
+ {'Id': 234, 'TargetId': 235, 'ProfileState': 1, 'DeviceId': 1235, "Type": 1000}],
+ "report_list": [{'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceId': 1234, "Type": 1000}]},
+ "job_tracking": (True, "msg", {'LastRunStatus': {"Name": "Complete"}}, True),
+ 'message': "Failed to deploy template.",
+ 'mparams': {"command": "deploy", "template_id": 123, "device_id": 1234}
+ },
+ {"json_data": {"value": [
+ {'Id': 123, 'TargetId': 123, 'ProfileState': 1,
+ 'DeviceId': 1234, "Type": 1000},
+ {'Id': 234, 'TargetId': 235, 'ProfileState': 1, 'DeviceId': 1235, "Type": 1000}],
+ "report_list": [{'Id': 1234, 'PublicAddress': "XX.XX.XX.XX",
+ 'DeviceId': 1234, "Type": 1000}]},
+ "job_tracking": (True, "msg", {'LastRunStatus': {"Name": "Complete"}}, True),
+ "get_device_ids": [],
+ 'message': "There are no devices provided for deploy operation",
+ 'mparams': {"command": "deploy", "template_id": 123, "device_id": 1234}
+ }
+ ])
+ def test_ome_template_fail_json(self, params, ome_connection_mock_for_template, ome_response_mock,
+ ome_default_args, module_mock, mocker):
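+        # Same flow as the success cases above, but these parametrized cases are expected to exit via fail_json.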
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_connection_mock_for_template.get_all_report_details.return_value = params[
+ 'json_data']
+ ome_default_args.update(params['mparams'])
+ mocks = ["job_tracking", "get_device_ids"]
+ for m in mocks:
+ if m in params:
+ mocker.patch(MODULE_PATH + m, return_value=params.get(m, {}))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['msg'] == params['message']
+
@pytest.mark.parametrize("exc_type",
[IOError, ValueError, TypeError, ConnectionError,
HTTPError, URLError, SSLError])
def test_main_template_exception_case(self, exc_type, mocker, ome_default_args,
ome_connection_mock_for_template, ome_response_mock):
- ome_default_args.update({"command": "export", "template_name": "t1", 'attributes': {'Attributes': "myattr1"}})
+ ome_default_args.update(
+ {"command": "export", "template_name": "t1", 'attributes': {'Attributes': "myattr1"}})
ome_response_mock.status_code = 400
ome_response_mock.success = False
json_str = to_text(json.dumps({"info": "error_details"}))
if exc_type == URLError:
mocker.patch(MODULE_PATH + 'password_no_log')
- mocker.patch(MODULE_PATH + '_get_resource_parameters', side_effect=exc_type("url open error"))
+ mocker.patch(MODULE_PATH + '_get_resource_parameters',
+ side_effect=exc_type("url open error"))
result = self._run_module(ome_default_args)
assert result["unreachable"] is True
elif exc_type not in [HTTPError, SSLValidationError]:
- mocker.patch(MODULE_PATH + '_get_resource_parameters', side_effect=exc_type("exception message"))
+ mocker.patch(MODULE_PATH + '_get_resource_parameters',
+ side_effect=exc_type("exception message"))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + '_get_resource_parameters',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_identity_pool.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_identity_pool.py
index 0e6cbca4f..425e6f299 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_identity_pool.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_identity_pool.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.1.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -15,7 +15,7 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import ome_template_identity_pool
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ssl import SSLError
@@ -85,7 +85,7 @@ class TestOMETemplateIdentityPool(FakeAnsibleModule):
else:
mocker.patch(
MODULE_PATH + 'get_identity_id',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str))
)
result = self._run_module_with_fail_json(ome_default_args)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_info.py
index 8f8bb3285..f59520e55 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_info.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_info.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 2.1.3
-# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -88,7 +88,7 @@ class TestOmeTemplateInfo(FakeAnsibleModule):
if exc_type not in [HTTPError, SSLValidationError]:
ome_connection_template_info_mock.invoke_request.side_effect = exc_type('test')
else:
- ome_connection_template_info_mock.invoke_request.side_effect = exc_type('http://testhost.com', 400,
+ ome_connection_template_info_mock.invoke_request.side_effect = exc_type('https://testhost.com', 400,
'http error message',
{"accept-type": "application/json"},
StringIO(json_str))
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_network_vlan.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_network_vlan.py
index c182b2b94..0ec0759f3 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_network_vlan.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_network_vlan.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.3.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -341,7 +341,7 @@ class TestOmeTemplateNetworkVlan(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'validate_vlans',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_network_vlan_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_network_vlan_info.py
new file mode 100644
index 000000000..dfb718f0a
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_template_network_vlan_info.py
@@ -0,0 +1,346 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 7.2.0
+# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import json
+from io import StringIO
+from ssl import SSLError
+
+import pytest
+from ansible.module_utils._text import to_text
+from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_template_network_vlan_info
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+
+SUCCESS_MSG = "Successfully retrieved the template network VLAN information."
+NO_TEMPLATES_MSG = "No templates with network info were found."
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_template_network_vlan_info.'
+
+
+@pytest.fixture
+def ome_connection_mock_for_vlaninfo(mocker, ome_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'RestOME')
+ ome_connection_mock_obj = connection_class_mock.return_value.__enter__.return_value
+ ome_connection_mock_obj.invoke_request.return_value = ome_response_mock
+ return ome_connection_mock_obj
+
+
+class TestOmeTemplateVlanInfo(FakeAnsibleModule):
+ module = ome_template_network_vlan_info
+
+ @pytest.mark.parametrize("params", [
+ {"json_data": {"value": [{'Id': 1234, 'Name': "ABCTAG1", "Type": 1000}],
+ "AttributeGroups": [
+ {
+ "GroupNameId": 1001,
+ "DisplayName": "NICModel",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 3,
+ "DisplayName": "NIC in Mezzanine 1B",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 1,
+ "DisplayName": "Port ",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 1,
+ "DisplayName": "Partition ",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 0,
+ "CustomId": 32,
+ "AttributeEditInfoId": 0,
+ "DisplayName": "Vlan Tagged",
+ "Description": None,
+ "Value": "25367, 32656, 32658, 26898",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 0
+ },
+ {
+ "AttributeId": 0,
+ "CustomId": 32,
+ "AttributeEditInfoId": 0,
+ "DisplayName": "Vlan UnTagged",
+ "Description": None,
+ "Value": "21474",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 0
+ },
+ {
+ "AttributeId": 0,
+ "CustomId": 32,
+ "AttributeEditInfoId": 0,
+ "DisplayName": "NIC Bonding Enabled",
+ "Description": None,
+ "Value": "False",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 0
+ }
+ ]
+ }
+ ],
+ "Attributes": []
+ },
+ {
+ "GroupNameId": 2,
+ "DisplayName": "Port ",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 1,
+ "DisplayName": "Partition ",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 0,
+ "CustomId": 31,
+ "AttributeEditInfoId": 0,
+ "DisplayName": "Vlan Tagged",
+ "Description": None,
+ "Value": None,
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 0
+ },
+ {
+ "AttributeId": 0,
+ "CustomId": 31,
+ "AttributeEditInfoId": 0,
+ "DisplayName": "Vlan UnTagged",
+ "Description": None,
+ "Value": "32658",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 0
+ },
+ {
+ "AttributeId": 0,
+ "CustomId": 31,
+ "AttributeEditInfoId": 0,
+ "DisplayName": "NIC Bonding Enabled",
+ "Description": None,
+ "Value": "true",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 0
+ }
+ ]
+ }
+ ],
+ "Attributes": []
+ }
+ ],
+ "Attributes": []
+ },
+ {
+ "GroupNameId": 1,
+ "DisplayName": "NIC in Mezzanine 1A",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 1,
+ "DisplayName": "Port ",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 1,
+ "DisplayName": "Partition ",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 0,
+ "CustomId": 30,
+ "AttributeEditInfoId": 0,
+ "DisplayName": "Vlan Tagged",
+ "Description": None,
+ "Value": "32656, 32658",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 0
+ },
+ {
+ "AttributeId": 0,
+ "CustomId": 30,
+ "AttributeEditInfoId": 0,
+ "DisplayName": "Vlan UnTagged",
+ "Description": None,
+ "Value": "25367",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 0
+ },
+ {
+ "AttributeId": 0,
+ "CustomId": 30,
+ "AttributeEditInfoId": 0,
+ "DisplayName": "NIC Bonding Enabled",
+ "Description": None,
+ "Value": "true",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 0
+ }
+ ]
+ }
+ ],
+ "Attributes": []
+ },
+ {
+ "GroupNameId": 2,
+ "DisplayName": "Port ",
+ "SubAttributeGroups": [
+ {
+ "GroupNameId": 1,
+ "DisplayName": "Partition ",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 0,
+ "CustomId": 29,
+ "AttributeEditInfoId": 0,
+ "DisplayName": "Vlan Tagged",
+ "Description": None,
+ "Value": "21474",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 0
+ },
+ {
+ "AttributeId": 0,
+ "CustomId": 29,
+ "AttributeEditInfoId": 0,
+ "DisplayName": "Vlan UnTagged",
+ "Description": None,
+ "Value": "32656",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 0
+ },
+ {
+ "AttributeId": 0,
+ "CustomId": 29,
+ "AttributeEditInfoId": 0,
+ "DisplayName": "NIC Bonding Enabled",
+ "Description": None,
+ "Value": "False",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 0
+ }
+ ]
+ }
+ ],
+ "Attributes": []
+ }
+ ],
+ "Attributes": []
+ }
+ ],
+ "Attributes": []
+ },
+ {
+ "GroupNameId": 1005,
+ "DisplayName": "NicBondingTechnology",
+ "SubAttributeGroups": [],
+ "Attributes": [
+ {
+ "AttributeId": 0,
+ "CustomId": 0,
+ "AttributeEditInfoId": 0,
+ "DisplayName": "Nic Bonding Technology",
+ "Description": None,
+ "Value": "LACP",
+ "IsReadOnly": False,
+ "IsIgnored": False,
+ "IsSecure": False,
+ "IsLinkedToSecure": False,
+ "TargetSpecificTypeId": 0
+ }
+ ]
+ }]},
+ 'message': SUCCESS_MSG, "success": True, 'case': "template with id",
+ 'mparams': {"template_id": 1234}},
+ {"json_data": {"value": [{'Id': 1234, 'Name': "temp1", "ViewTypeId": 1}]},
+ 'message': SUCCESS_MSG, "success": True, 'case': "template with name",
+ 'mparams': {"template_name": "temp1"}},
+ {"json_data": {"value": [{'Id': 1234, 'Name': "temp2", "ViewTypeId": 2}]},
+ 'message': "Template with name 'temp1' not found.", "success": True, 'case': "template not found",
+ 'mparams': {"template_name": "temp1"}},
+ {"json_data": {"value": [{'Id': 1234, 'Name': "temp2", "ViewTypeId": 3}]},
+ 'message': SUCCESS_MSG, "success": True, 'case': "all templates case",
+ 'mparams': {}},
+ {"json_data": {"value": [{'Id': 1234, 'Name': "temp2", "ViewTypeId": 4}]},
+ 'message': SUCCESS_MSG, "success": True, 'case': "invalid templates case",
+ 'mparams': {}}
+ ])
+ def test_ome_template_network_vlan_info_success(self, params, ome_connection_mock_for_vlaninfo, ome_response_mock,
+ ome_default_args, module_mock):
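+        # Feed the mocked template/VLAN payloads through the module and verify the returned message for each case.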
+ ome_response_mock.success = params.get("success", True)
+ ome_response_mock.json_data = params['json_data']
+ ome_connection_mock_for_vlaninfo.get_all_items_with_pagination.return_value = params['json_data']
+ ome_default_args.update(params['mparams'])
+ result = self._run_module(ome_default_args, check_mode=params.get('check_mode', False))
+ assert result['msg'] == params['message']
+
+ @pytest.mark.parametrize("exc_type",
+ [IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
+ def test_ome_template_network_vlan_info_main_exception_failure_case(self, exc_type, mocker, ome_default_args,
+ ome_connection_mock_for_vlaninfo,
+ ome_response_mock):
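+        # URLError should mark the result 'unreachable'; other exceptions should cause the module to fail.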
+ ome_default_args.update({"template_id": 1234})
+ ome_response_mock.status_code = 400
+ ome_response_mock.success = False
+ json_str = to_text(json.dumps({"info": "error_details"}))
+ if exc_type == URLError:
+ mocker.patch(MODULE_PATH + 'get_template_details', side_effect=exc_type("url open error"))
+ result = self._run_module(ome_default_args)
+ assert result["unreachable"] is True
+ elif exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'get_template_details', side_effect=exc_type("exception message"))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ else:
+ mocker.patch(MODULE_PATH + 'get_template_details',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result = self._run_module_with_fail_json(ome_default_args)
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user.py
index ac3c18145..623b65535 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 4.0.0
-# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -17,8 +17,7 @@ import pytest
from ansible_collections.dellemc.openmanage.plugins.modules import ome_user
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants, \
- AnsibleFailJSonException
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from io import StringIO
from ansible.module_utils._text import to_text
@@ -171,7 +170,7 @@ class TestOmeUser(FakeAnsibleModule):
else:
mocker.patch(
MODULE_PATH + 'ome_user._get_resource_parameters',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(ome_default_args)
assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user_info.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user_info.py
index 6d48cc183..c640c89e0 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user_info.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_user_info.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 2.1.1
-# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -87,7 +87,7 @@ class TestOmeUserInfo(FakeAnsibleModule):
if exc_type not in [HTTPError, SSLValidationError]:
ome_connection_user_info_mock.invoke_request.side_effect = exc_type('test')
else:
- ome_connection_user_info_mock.invoke_request.side_effect = exc_type('http://testhost.com', 400,
+ ome_connection_user_info_mock.invoke_request.side_effect = exc_type('https://testhost.com', 400,
'http error message',
{"accept-type": "application/json"},
StringIO(json_str))
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_event_subscription.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_event_subscription.py
index 075406a75..9a77be0c4 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_event_subscription.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_event_subscription.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 4.1.0
-# Copyright (C) 2021 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -25,6 +25,7 @@ SUBSCRIPTION_UNABLE_ADD = "Unable to add a subscription."
SUBSCRIPTION_ADDED = "Successfully added the subscription."
DESTINATION_MISMATCH = "No changes found to be applied."
EVENT_TYPE_INVALID = "value of event_type must be one of: Alert, MetricReport, got: Metricreport"
+PARAM_DESTINATION = "https://XX.XX.XX.XX:8188"
@pytest.fixture
@@ -38,8 +39,8 @@ def redfish_connection_mock(mocker, redfish_response_mock):
class TestRedfishSubscription(FakeAnsibleModule):
module = redfish_event_subscription
- @pytest.mark.parametrize("val", [{"destination": "https://192.168.1.100:8188"},
- {"destination": "https://192.168.1.100:8189"}])
+ @pytest.mark.parametrize("val", [{"destination": PARAM_DESTINATION},
+ {"destination": "https://XX.XX.XX.XX:8189"}])
def test_function_get_subscription_success(self, mocker, redfish_connection_mock, redfish_response_mock,
redfish_default_args, val):
redfish_default_args.update({"state": "absent"})
@@ -53,7 +54,7 @@ class TestRedfishSubscription(FakeAnsibleModule):
"Context": "RedfishEvent",
"DeliveryRetryPolicy": "RetryForever",
"Description": "Event Subscription Details",
- "Destination": "https://192.168.1.100:8189",
+ "Destination": "https://XX.XX.XX.XX:8189",
"EventFormatType": "Event",
"EventTypes": [
"Alert"
@@ -82,7 +83,7 @@ class TestRedfishSubscription(FakeAnsibleModule):
"Context": "RedfishEvent",
"DeliveryRetryPolicy": "RetryForever",
"Description": "Event Subscription Details",
- "Destination": "https://192.168.1.100:8188",
+ "Destination": PARAM_DESTINATION,
"EventFormatType": "MetricReport",
"EventTypes": [
"MetricReport"
@@ -130,9 +131,9 @@ class TestRedfishSubscription(FakeAnsibleModule):
assert result["Destination"] == val["destination"]
@pytest.mark.parametrize("val", [
- {"destination": "https://192.168.1.100:8188", "event_type": "MetricReport",
+ {"destination": PARAM_DESTINATION, "event_type": "MetricReport",
"event_format_type": "MetricReport"},
- {"destination": "https://192.168.1.100:8188", "event_type": "Alert", "event_format_type": "Event"}])
+ {"destination": PARAM_DESTINATION, "event_type": "Alert", "event_format_type": "Event"}])
def test_function_create_subscription(self, mocker, redfish_connection_mock, redfish_response_mock,
redfish_default_args, val):
redfish_default_args.update({"state": "absent"})
@@ -157,9 +158,9 @@ class TestRedfishSubscription(FakeAnsibleModule):
assert result.json_data["EventTypes"] == [val["event_type"]]
@pytest.mark.parametrize("val", [
- {"destination": "https://100.96.80.1:161", "event_type": "MetricReport",
+ {"destination": "https://XX.XX.XX.XX:161", "event_type": "MetricReport",
"event_format_type": "MetricReport"},
- {"destination": "https://100.96.80.1:161", "event_type": "Alert", "event_format_type": "Event"}])
+ {"destination": "https://XX.XX.XX.XX:161", "event_type": "Alert", "event_format_type": "Event"}])
def test_function_get_subscription_details(self, mocker, redfish_connection_mock, redfish_response_mock,
redfish_default_args, val):
redfish_default_args.update({"state": "absent"})
@@ -202,9 +203,9 @@ class TestRedfishSubscription(FakeAnsibleModule):
assert result["EventTypes"] == [val["event_type"]]
@pytest.mark.parametrize("val", [
- {"destination": "https://100.96.80.1:161", "event_type": "MetricReport",
+ {"destination": "https://XX.XX.XX.XX:161", "event_type": "MetricReport",
"event_format_type": "MetricReport"},
- {"destination": "https://100.96.80.1:161", "event_type": "Alert", "event_format_type": "Event"}])
+ {"destination": "https://XX.XX.XX.XX:161", "event_type": "Alert", "event_format_type": "Event"}])
def test_function_get_subscription_details_None(self, mocker, redfish_connection_mock, redfish_response_mock,
redfish_default_args, val):
redfish_default_args.update({"state": "absent"})
@@ -245,8 +246,8 @@ class TestRedfishSubscription(FakeAnsibleModule):
assert result is None
@pytest.mark.parametrize("val", [
- {"destination": "https://100.96.80.1:161"},
- {"destination": "https://100.96.80.1:161"}])
+ {"destination": "https://XX.XX.XX.XX:161"},
+ {"destination": "https://XX.XX.XX.XX:161"}])
def test_function_delete_subscription(self, mocker, redfish_connection_mock, redfish_response_mock,
redfish_default_args, val):
redfish_default_args.update({"state": "absent"})
@@ -284,7 +285,8 @@ class TestRedfishSubscription(FakeAnsibleModule):
def test_module_validation_input_params(self, mocker, redfish_connection_mock, redfish_response_mock,
redfish_default_args):
redfish_default_args.update({"state": "absent"})
- redfish_default_args.update({"destination": "http://192.168.1.100:8188"})
+ http_str = "http"
+ redfish_default_args.update({"destination": http_str + "://XX.XX.XX.XX:8188"})
redfish_default_args.update({"event_type": "MetricReport"})
redfish_default_args.update({"event_format_type": "MetricReport"})
with pytest.raises(Exception) as err:
@@ -294,7 +296,7 @@ class TestRedfishSubscription(FakeAnsibleModule):
def test_module_absent_does_not_exist(self, mocker, redfish_connection_mock, redfish_response_mock,
redfish_default_args):
redfish_default_args.update({"state": "absent"})
- redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"destination": PARAM_DESTINATION})
redfish_default_args.update({"event_type": "MetricReport"})
redfish_default_args.update({"event_format_type": "MetricReport"})
@@ -307,13 +309,13 @@ class TestRedfishSubscription(FakeAnsibleModule):
def test_module_absent_does_exist(self, mocker, redfish_connection_mock, redfish_response_mock,
redfish_default_args):
redfish_default_args.update({"state": "absent"})
- redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"destination": PARAM_DESTINATION})
redfish_default_args.update({"event_type": "MetricReport"})
redfish_default_args.update({"event_format_type": "MetricReport"})
json_data = {
"Id": "c6ff37fc-8204-11eb-b08f-2cea7ff7fe80",
- "Destination": "https://192.168.1.100:8188",
+ "Destination": PARAM_DESTINATION,
"EventFormatType": "MetricReport",
"Context": "RedfishEvent",
"Protocol": "Redfish",
@@ -331,13 +333,13 @@ class TestRedfishSubscription(FakeAnsibleModule):
def test_module_absent_does_exist_error(self, mocker, redfish_connection_mock, redfish_response_mock,
redfish_default_args):
redfish_default_args.update({"state": "absent"})
- redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"destination": PARAM_DESTINATION})
redfish_default_args.update({"event_type": "MetricReport"})
redfish_default_args.update({"event_format_type": "MetricReport"})
json_data = {
"Id": "c6ff37fc-8204-11eb-b08f-2cea7ff7fe80",
- "Destination": "https://192.168.1.100:8188",
+ "Destination": PARAM_DESTINATION,
"EventFormatType": "MetricReport",
"Context": "RedfishEvent",
"Protocol": "Redfish",
@@ -354,12 +356,12 @@ class TestRedfishSubscription(FakeAnsibleModule):
def test_module_present_does_not_exist(self, mocker, redfish_connection_mock, redfish_response_mock,
redfish_default_args):
redfish_default_args.update({"state": "present"})
- redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"destination": PARAM_DESTINATION})
redfish_default_args.update({"event_type": "MetricReport"})
redfish_default_args.update({"event_format_type": "MetricReport"})
json_data = {
- "Destination": "https://192.168.1.100:8188",
+ "Destination": PARAM_DESTINATION,
"EventFormatType": "MetricReport",
"Context": "RedfishEvent",
"Protocol": "Redfish",
@@ -380,12 +382,12 @@ class TestRedfishSubscription(FakeAnsibleModule):
def test_module_present_does_not_exist_error(self, mocker, redfish_connection_mock, redfish_response_mock,
redfish_default_args):
redfish_default_args.update({"state": "present"})
- redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"destination": PARAM_DESTINATION})
redfish_default_args.update({"event_type": "MetricReport"})
redfish_default_args.update({"event_format_type": "MetricReport"})
json_data = {
- "Destination": "https://192.168.1.100:8188",
+ "Destination": PARAM_DESTINATION,
"EventFormatType": "MetricReport",
"Context": "RedfishEvent",
"Protocol": "Redfish",
@@ -406,12 +408,12 @@ class TestRedfishSubscription(FakeAnsibleModule):
redfish_response_mock,
redfish_default_args):
redfish_default_args.update({"state": "present"})
- redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"destination": PARAM_DESTINATION})
redfish_default_args.update({"event_type": "Metricreport"})
redfish_default_args.update({"event_format_type": "MetricReport"})
json_data = {
- "Destination": "https://192.168.1.100:8188",
+ "Destination": PARAM_DESTINATION,
"EventFormatType": "MetricReport",
"Context": "RedfishEvent",
"Protocol": "Redfish",
@@ -433,13 +435,13 @@ class TestRedfishSubscription(FakeAnsibleModule):
def test_module_present_does_exist(self, mocker, redfish_connection_mock, redfish_response_mock,
redfish_default_args):
redfish_default_args.update({"state": "present"})
- redfish_default_args.update({"destination": "https://192.168.1.100:8188"})
+ redfish_default_args.update({"destination": PARAM_DESTINATION})
redfish_default_args.update({"event_type": "MetricReport"})
redfish_default_args.update({"event_format_type": "MetricReport"})
json_data = {
"Id": "c6ff37fc-8204-11eb-b08f-2cea7ff7fe80",
- "Destination": "https://192.168.1.100:8188",
+ "Destination": PARAM_DESTINATION,
"EventFormatType": "MetricReport",
"Context": "RedfishEvent",
"Protocol": "Redfish",
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_firmware.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_firmware.py
index dac24df41..88e3c5ed0 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_firmware.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_firmware.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.5.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2020-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -17,7 +17,7 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import redfish_firmware
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from mock import MagicMock
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -26,7 +26,10 @@ from ansible.module_utils._text import to_text
from mock import patch, mock_open
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
-JOB_URI = "/redfish/v1/JobService/Jobs/{job_id}"
+JOB_URI = "JobService/Jobs/{job_id}"
+FIRMWARE_DATA = "multipart/form-data"
+HTTPS_IMAGE_URI = "https://home/firmware_repo/component.exe"
+HTTPS_ADDRESS_DELL = "https://dell.com"
@pytest.fixture
@@ -109,39 +112,41 @@ class TestRedfishFirmware(FakeAnsibleModule):
def test_main_redfish_firmware_success_case(self, redfish_firmware_connection_mock, redfish_default_args, mocker,
redfish_response_mock):
- redfish_default_args.update({"image_uri": "/home/firmware_repo/component.exe"})
+ redfish_default_args.update({"image_uri": "/home/firmware_repo/component.exe", "job_wait": False})
redfish_firmware_connection_mock.headers.get("Location").return_value = "https://multipart/form-data"
- redfish_firmware_connection_mock.headers.get("Location").split().return_value = "multipart/form-data"
+ redfish_firmware_connection_mock.headers.get("Location").split().return_value = FIRMWARE_DATA
mocker.patch(MODULE_PATH + 'redfish_firmware.firmware_update',
return_value=redfish_response_mock)
- redfish_response_mock.json_data = {"image_uri": "http://home/firmware_repo/component.exe"}
+ redfish_response_mock.json_data = {"image_uri": HTTPS_IMAGE_URI}
redfish_response_mock.status_code = 201
redfish_response_mock.success = True
result = self._run_module(redfish_default_args)
- assert result == {'changed': True,
- 'msg': 'Successfully submitted the firmware update task.',
- 'task': {'id': redfish_response_mock.headers.get().split().__getitem__(),
- 'uri': JOB_URI.format(job_id=redfish_response_mock.headers.get().split().__getitem__())}}
+ assert result['changed'] is True
+ assert result['msg'] == 'Successfully submitted the firmware update task.'
+ assert result['task']['id'] == redfish_response_mock.headers.get().split().__getitem__()
+ assert result['task']['uri'] == JOB_URI.format(job_id=redfish_response_mock.headers.get().split().__getitem__())
@pytest.mark.parametrize("exc_type",
[URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
def test_main_redfish_firmware_exception_handling_case(self, exc_type, mocker, redfish_default_args,
redfish_firmware_connection_mock,
redfish_response_mock):
- redfish_default_args.update({"image_uri": "/home/firmware_repo/component.exe"})
+ redfish_default_args.update({"image_uri": "/home/firmware_repo/component.exe", "job_wait_timeout": 0})
redfish_response_mock.json_data = {"value": [{"image_uri": "/home/firmware_repo/component.exe"}]}
redfish_response_mock.status_code = 400
redfish_response_mock.success = False
json_str = to_text(json.dumps({"data": "out"}))
-
if exc_type not in [HTTPError, SSLValidationError]:
mocker.patch(MODULE_PATH + 'redfish_firmware.firmware_update',
side_effect=exc_type('test'))
else:
mocker.patch(MODULE_PATH + 'redfish_firmware.firmware_update',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
- result = self._run_module_with_fail_json(redfish_default_args)
+ if exc_type == HTTPError:
+ result = self._run_module(redfish_default_args)
+ else:
+ result = self._run_module_with_fail_json(redfish_default_args)
assert 'task' not in result
assert 'msg' in result
assert result['failed'] is True
@@ -150,7 +155,7 @@ class TestRedfishFirmware(FakeAnsibleModule):
def test_get_update_service_target_success_case(self, redfish_default_args, redfish_firmware_connection_mock,
redfish_response_mock):
- redfish_default_args.update({"transfer_protocol": "HTTP"})
+ redfish_default_args.update({"transfer_protocol": "HTTP", "job_wait_timeout": 0})
f_module = self.get_module_mock(params=redfish_default_args)
redfish_response_mock.status_code = 200
redfish_response_mock.success = True
@@ -162,17 +167,17 @@ class TestRedfishFirmware(FakeAnsibleModule):
}
},
"transfer_protocol": "HTTP",
- "HttpPushUri": "http://dell.com",
+ "HttpPushUri": HTTPS_ADDRESS_DELL,
"FirmwareInventory": {
"@odata.id": "2134"
}
}
result = self.module._get_update_service_target(redfish_firmware_connection_mock, f_module)
- assert result == ('2134', 'http://dell.com', '')
+ assert result == ('2134', HTTPS_ADDRESS_DELL, '')
def test_get_update_service_target_uri_none_case(self, redfish_default_args, redfish_firmware_connection_mock,
redfish_response_mock):
- redfish_default_args.update({"transfer_protocol": "HTTP"})
+ redfish_default_args.update({"transfer_protocol": "HTTP", "job_wait_timeout": 0})
f_module = self.get_module_mock(params=redfish_default_args)
redfish_response_mock.status_code = 200
redfish_response_mock.success = True
@@ -195,7 +200,7 @@ class TestRedfishFirmware(FakeAnsibleModule):
def test_get_update_service_target_failed_case(self, redfish_default_args, redfish_firmware_connection_mock,
redfish_response_mock):
- redfish_default_args.update({"transfer_protocol": "HTTP"})
+ redfish_default_args.update({"transfer_protocol": "HTTP", "job_wait_timeout": 0})
f_module = self.get_module_mock(params=redfish_default_args)
redfish_response_mock.status_code = 200
redfish_response_mock.success = True
@@ -206,7 +211,7 @@ class TestRedfishFirmware(FakeAnsibleModule):
}
},
"transfer_protocol": "HTTP",
- "HttpPushUri": "http://dell.com",
+ "HttpPushUri": HTTPS_ADDRESS_DELL,
"FirmwareInventory": {
"@odata.id": "2134"
}
@@ -218,13 +223,13 @@ class TestRedfishFirmware(FakeAnsibleModule):
def test_firmware_update_success_case01(self, redfish_default_args, redfish_firmware_connection_mock,
redfish_response_mock, mocker):
mocker.patch(MODULE_PATH + 'redfish_firmware._get_update_service_target',
- return_value=('2134', 'http://dell.com', 'redfish'))
- redfish_default_args.update({"image_uri": "http://home/firmware_repo/component.exe",
- "transfer_protocol": "HTTP"})
+ return_value=('2134', HTTPS_ADDRESS_DELL, 'redfish'))
+ redfish_default_args.update({"image_uri": HTTPS_IMAGE_URI,
+ "transfer_protocol": "HTTP", "timeout": 0, "job_wait_timeout": 0})
f_module = self.get_module_mock(params=redfish_default_args)
redfish_response_mock.status_code = 200
redfish_response_mock.success = True
- redfish_response_mock.json_data = {"image_uri": "http://home/firmware_repo/component.exe",
+ redfish_response_mock.json_data = {"image_uri": HTTPS_IMAGE_URI,
"transfer_protocol": "HTTP"}
result = self.module.firmware_update(redfish_firmware_connection_mock, f_module)
assert result == redfish_response_mock
@@ -232,15 +237,15 @@ class TestRedfishFirmware(FakeAnsibleModule):
def test_firmware_update_success_case02(self, redfish_default_args, redfish_firmware_connection_mock,
redfish_response_mock, mocker):
mocker.patch(MODULE_PATH + "redfish_firmware._get_update_service_target",
- return_value=('2134', 'nhttp://dell.com', 'multipart/form-data'))
+ return_value=('2134', HTTPS_ADDRESS_DELL, 'multipart/form-data'))
mocker.patch("ansible_collections.dellemc.openmanage.plugins.modules.redfish_firmware._encode_form_data",
- return_value=({"file": (3, "nhttp://dell.com", "multipart/form-data")}, "multipart/form-data"))
- redfish_default_args.update({"image_uri": "nhttp://home/firmware_repo/component.exe",
- "transfer_protocol": "HTTP"})
+ return_value=({"file": (3, HTTPS_ADDRESS_DELL, FIRMWARE_DATA)}, FIRMWARE_DATA))
+ redfish_default_args.update({"image_uri": HTTPS_IMAGE_URI,
+ "transfer_protocol": "HTTP", "timeout": 0, "job_wait_timeout": 0})
f_module = self.get_module_mock(params=redfish_default_args)
redfish_response_mock.status_code = 200
redfish_response_mock.success = True
- redfish_response_mock.json_data = {"image_uri": "nhttp://home/firmware_repo/component.exe",
+ redfish_response_mock.json_data = {"image_uri": HTTPS_IMAGE_URI,
"transfer_protocol": "HTTP"}
if sys.version_info.major == 3:
builtin_module_name = 'builtins'
@@ -250,18 +255,22 @@ class TestRedfishFirmware(FakeAnsibleModule):
result = self.module.firmware_update(redfish_firmware_connection_mock, f_module)
assert result == redfish_response_mock
- def test_firmware_update_success_case03(self, redfish_default_args, redfish_firmware_connection_mock,
+ @pytest.mark.parametrize("params", [{"ip": "192.161.1.1:443"}, {"ip": "192.161.1.1"},
+ {"ip": "82f5:d985:a2d5:f0c3:5392:cc52:27d1:4da6"},
+ {"ip": "[82f5:d985:a2d5:f0c3:5392:cc52:27d1:4da6]"},
+ {"ip": "[82f5:d985:a2d5:f0c3:5392:cc52:27d1:4da6]:443"}])
+ def test_firmware_update_success_case03(self, params, redfish_default_args, redfish_firmware_connection_mock,
redfish_response_mock, mocker):
mocker.patch(MODULE_PATH + "redfish_firmware._get_update_service_target",
- return_value=('2134', 'nhttp://dell.com', 'multipart/form-data'))
+ return_value=('2134', HTTPS_ADDRESS_DELL, 'multipart/form-data'))
mocker.patch(MODULE_PATH + "redfish_firmware._encode_form_data",
- return_value=({"file": (3, "nhttp://dell.com", "multipart/form-data")}, "multipart/form-data"))
- redfish_default_args.update({"image_uri": "nhttp://home/firmware_repo/component.exe",
- "transfer_protocol": "HTTP"})
+ return_value=({"file": (3, HTTPS_ADDRESS_DELL, FIRMWARE_DATA)}, FIRMWARE_DATA))
+ redfish_default_args.update({"baseuri": params["ip"], "image_uri": HTTPS_IMAGE_URI,
+ "transfer_protocol": "HTTP", "timeout": 0, "job_wait_timeout": 0})
f_module = self.get_module_mock(params=redfish_default_args)
redfish_response_mock.status_code = 201
redfish_response_mock.success = True
- redfish_response_mock.json_data = {"image_uri": "nhttp://home/firmware_repo/component.exe",
+ redfish_response_mock.json_data = {"image_uri": HTTPS_IMAGE_URI,
"transfer_protocol": "HTTP"}
if sys.version_info.major == 3:
builtin_module_name = 'builtins'
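Note: the exception-handling tests above feed firmware_update an HTTPError whose body is a StringIO of JSON, so the module can surface it as error_info. A minimal standalone sketch of that pattern, assuming pytest-mock is available (the make_http_error helper name is illustrative and not part of the collection):

    import json
    from io import StringIO
    from ansible.module_utils._text import to_text
    from ansible.module_utils.six.moves.urllib.error import HTTPError

    def make_http_error(url="https://testhost.com", code=400, msg="http error message"):
        # The last argument is a file-like body; code that calls json.load() on the
        # caught HTTPError can then recover {"data": "out"} for its error_info output.
        body = to_text(json.dumps({"data": "out"}))
        return HTTPError(url, code, msg, {"accept-type": "application/json"}, StringIO(body))

    # Typical use in these tests:
    # mocker.patch(MODULE_PATH + 'redfish_firmware.firmware_update', side_effect=make_http_error())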
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_firmware_rollback.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_firmware_rollback.py
new file mode 100644
index 000000000..68171c0b0
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_firmware_rollback.py
@@ -0,0 +1,299 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 8.2.0
+# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import json
+from ansible_collections.dellemc.openmanage.plugins.modules import redfish_firmware_rollback
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from io import StringIO
+from mock import MagicMock
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+ACCESS_TYPE = "application/json"
+HTTP_ERROR_MSG = 'http error message'
+HTTPS_ADDRESS = 'https://testhost.com'
+
+
+@pytest.fixture
+def redfish_connection_mock(mocker, redfish_response_mock):
+ connection_class_mock = mocker.patch(MODULE_PATH + 'redfish_firmware_rollback.Redfish')
+ redfish_connection_obj = connection_class_mock.return_value.__enter__.return_value
+ redfish_connection_obj.invoke_request.return_value = redfish_response_mock
+ return redfish_connection_obj
+
+
+class TestRedfishFirmware(FakeAnsibleModule):
+
+ module = redfish_firmware_rollback
+
+ @pytest.mark.parametrize("exc_type", [URLError, HTTPError, TypeError])
+ def test_wait_for_redfish_idrac_reset_http(self, exc_type, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args, mocker):
+ redfish_default_args.update({"name": "BIOS", "reboot": True, "reboot_timeout": 900})
+ f_module = self.get_module_mock(params=redfish_default_args)
+ mocker.patch(MODULE_PATH + 'redfish_firmware_rollback.time.sleep', return_value=None)
+ mocker.patch(MODULE_PATH + 'redfish_firmware_rollback.Redfish', return_value=MagicMock())
+ mocker.patch(MODULE_PATH + 'redfish_firmware_rollback.require_session', return_value=(1, "secret token"))
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type == HTTPError:
+ redfish_connection_mock.invoke_request.side_effect = exc_type(
+ HTTPS_ADDRESS, 401, HTTP_ERROR_MSG, {"accept-type": ACCESS_TYPE},
+ StringIO(json_str)
+ )
+ result = self.module.wait_for_redfish_idrac_reset(f_module, redfish_connection_mock, 5)
+ assert result[0] is False
+ assert result[1] is True
+ assert result[2] == "iDRAC reset is in progress. Until the iDRAC is reset, the changes would not apply."
+ redfish_connection_mock.invoke_request.side_effect = exc_type(
+ HTTPS_ADDRESS, 400, HTTP_ERROR_MSG, {"accept-type": ACCESS_TYPE},
+ StringIO(json_str)
+ )
+ result = self.module.wait_for_redfish_idrac_reset(f_module, redfish_connection_mock, 5)
+ assert result[0] is True
+ assert result[1] is True
+ assert result[2] == "iDRAC reset is in progress. Until the iDRAC is reset, the changes would not apply."
+ elif exc_type == URLError:
+ redfish_connection_mock.invoke_request.side_effect = exc_type("exception message")
+ result = self.module.wait_for_redfish_idrac_reset(f_module, redfish_connection_mock, 5)
+ assert result[0] is True
+ assert result[1] is True
+ assert result[2] == "iDRAC reset is in progress. Until the iDRAC is reset, the changes would not apply."
+ else:
+ redfish_connection_mock.invoke_request.side_effect = exc_type("exception message")
+ result = self.module.wait_for_redfish_idrac_reset(f_module, redfish_connection_mock, 5)
+ assert result[0] is True
+ assert result[1] is True
+
+ def test_wait_for_redfish_idrac_reset(self, redfish_connection_mock, redfish_response_mock,
+ redfish_default_args, mocker):
+ redfish_default_args.update({"name": "BIOS", "reboot": True, "reboot_timeout": 900})
+ f_module = self.get_module_mock(params=redfish_default_args)
+ mocker.patch(MODULE_PATH + 'redfish_firmware_rollback.time.sleep', return_value=None)
+ result = self.module.wait_for_redfish_idrac_reset(f_module, redfish_connection_mock, 900)
+ assert result[0] is False
+ assert result[1] is False
+ assert result[2] == "iDRAC has been reset successfully."
+
+ def test_rollback_firmware(self, redfish_connection_mock, redfish_response_mock, redfish_default_args, mocker):
+ redfish_default_args.update({"name": "BIOS", "reboot": True, "reboot_timeout": 900})
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.simple_update", return_value=["JID_12345678"])
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.wait_for_redfish_reboot_job",
+ return_value=({"Id": "JID_123456789"}, True, ""))
+ job_resp_mock = MagicMock()
+ job_resp_mock.json_data = {"JobState": "RebootFailed"}
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.wait_for_redfish_job_complete",
+ return_value=(job_resp_mock, ""))
+ f_module = self.get_module_mock(params=redfish_default_args)
+ preview_uri = ["/redfish/v1/Previous1.1"]
+ reboot_uri = ["/redfish/v1/Previous.life_cycle.1.1"]
+ update_uri = "/redfish/v1/SimpleUpdate"
+ with pytest.raises(Exception) as ex:
+ self.module.rollback_firmware(redfish_connection_mock, f_module, preview_uri, reboot_uri, update_uri)
+ assert ex.value.args[0] == "Failed to reboot the server."
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.wait_for_redfish_job_complete",
+ return_value=(job_resp_mock, "Failed message."))
+ with pytest.raises(Exception) as ex:
+ self.module.rollback_firmware(redfish_connection_mock, f_module, preview_uri, reboot_uri, update_uri)
+ assert ex.value.args[0] == "Task excited after waiting for 900 seconds. " \
+ "Check console for firmware rollback status."
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.wait_for_redfish_reboot_job",
+ return_value=({}, False, "Reset operation is failed."))
+ with pytest.raises(Exception) as ex:
+ self.module.rollback_firmware(redfish_connection_mock, f_module, preview_uri, reboot_uri, update_uri)
+ assert ex.value.args[0] == "Reset operation is failed."
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.get_job_status",
+ return_value=({"JobState": "Completed"}, False))
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.wait_for_redfish_reboot_job",
+ return_value=({"JobState": "Completed", "Id": "JID_123456789"}, True, ""))
+ job_resp_mock.json_data = {"JobState": "RebootCompleted"}
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.wait_for_redfish_job_complete",
+ return_value=(job_resp_mock, ""))
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.simple_update", return_value=["JID_12345678"])
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.wait_for_redfish_idrac_reset",
+ return_value=(False, True, ""))
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.get_job_status",
+ return_value=([{"JobState": "Completed"}], 0))
+ result = self.module.rollback_firmware(redfish_connection_mock, f_module, preview_uri, reboot_uri, update_uri)
+ assert result[0] == [{'JobState': 'Completed'}, {'JobState': 'Completed'}]
+ assert result[1] == 0
+
+ redfish_default_args.update({"name": "BIOS", "reboot": False, "reboot_timeout": 900})
+ f_module = self.get_module_mock(params=redfish_default_args)
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.get_job_status",
+ return_value=([{"JobState": "Scheduled"}], 0))
+ result = self.module.rollback_firmware(redfish_connection_mock, f_module, preview_uri, [], update_uri)
+ assert result[0] == [{"JobState": "Scheduled"}]
+ assert result[1] == 0
+
+ def test_main(self, redfish_connection_mock, redfish_response_mock, redfish_default_args, mocker):
+ redfish_default_args.update({"reboot": True, "name": "BIOS"})
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.get_rollback_preview_target",
+ return_value=(["Previous/URI/1"], [], "/redfish/SimpleUpdate"))
+ job_status = {"ActualRunningStartTime": "2023-08-07T05:09:08", "ActualRunningStopTime": "2023-08-07T05:12:41",
+ "CompletionTime": "2023-08-07T05:12:41", "Description": "Job Instance", "EndTime": "TIME_NA",
+ "Id": "JID_914026562845", "JobState": "Completed", "JobType": "FirmwareUpdate",
+ "Message": "Job completed successfully.", "MessageArgs": [], "MessageId": "PR19",
+ "Name": "Firmware Rollback: Network", "PercentComplete": 100, "StartTime": "2023-08-07T05:04:16",
+ "TargetSettingsURI": None}
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.rollback_firmware", return_value=(job_status, 0, False))
+ result = self._run_module(redfish_default_args)
+ assert result["msg"] == "Successfully completed the job for firmware rollback."
+ assert result["job_status"]["JobState"] == "Completed"
+ job_status.update({"JobState": "Failed"})
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.rollback_firmware", return_value=(job_status, 1, False))
+ result = self._run_module(redfish_default_args)
+ assert result["msg"] == "The job for firmware rollback has been completed with error(s)."
+ assert result["job_status"]["JobState"] == "Failed"
+ redfish_default_args.update({"reboot": False, "name": "BIOS"})
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.rollback_firmware", return_value=(job_status, 1, False))
+ result = self._run_module(redfish_default_args)
+ assert result["msg"] == "The job for firmware rollback has been scheduled with error(s)."
+ assert result["job_status"]["JobState"] == "Failed"
+ job_status.update({"JobState": "Scheduled"})
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.rollback_firmware", return_value=(job_status, 0, False))
+ result = self._run_module(redfish_default_args)
+ assert result["msg"] == "Successfully scheduled the job for firmware rollback."
+ assert result["job_status"]["JobState"] == "Scheduled"
+ job_status = {}
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.rollback_firmware", return_value=(job_status, 0, False))
+ result = self._run_module(redfish_default_args)
+ assert result["msg"] == "Failed to complete the job for firmware rollback."
+ redfish_default_args.update({"reboot": True, "name": "BIOS", "reboot_timeout": -1})
+ result = self._run_module_with_fail_json(redfish_default_args)
+ assert result["msg"] == "The parameter reboot_timeout value cannot be negative or zero."
+ redfish_default_args.update({"reboot": False, "name": "BIOS", "reboot_timeout": 900})
+ job_status.update({"JobState": "Completed"})
+ mocker.patch(MODULE_PATH + "redfish_firmware_rollback.rollback_firmware", return_value=(job_status, 0, True))
+ result = self._run_module(redfish_default_args)
+ assert result["msg"] == "Successfully completed the job for firmware rollback."
+
+ def test_get_rollback_preview_target(self, redfish_connection_mock, redfish_response_mock, redfish_default_args):
+ redfish_default_args.update({"username": "user", "password": "pwd", "baseuri": "XX.XX.XX.XX",
+ "name": "BIOS", "reboot_timeout": 3600})
+ f_module = self.get_module_mock(params=redfish_default_args)
+ redfish_response_mock.json_data = {"Actions": {"#UpdateService.SimpleUpdate": {}}}
+ with pytest.raises(Exception) as ex:
+ self.module.get_rollback_preview_target(redfish_connection_mock, f_module)
+ assert ex.value.args[0] == "The target firmware version does not support the firmware rollback."
+ redfish_response_mock.json_data = {
+ "Actions": {"#UpdateService.SimpleUpdate": {"target": "/redfish/v1/SimpleUpdate"}},
+ "FirmwareInventory": {"@odata.id": "/redfish/v1/FirmwareInventory"},
+ "Members": [
+ {"@odata.id": "uri/1", "Id": "Previous.1", "Name": "QLogic.1", "Version": "1.2"},
+ {"@odata.id": "uri/2", "Id": "Previous.2", "Name": "QLogic.2", "Version": "1.2"},
+ {"@odata.id": "uri/3", "Id": "Previous.3", "Name": "QLogic.3", "Version": "1.2"},
+ {"@odata.id": "uri/4", "Id": "Previous.4", "Name": "QLogic.4", "Version": "1.2"}]
+ }
+ with pytest.raises(Exception) as ex:
+ self.module.get_rollback_preview_target(redfish_connection_mock, f_module)
+ assert ex.value.args[0] == "No changes found to be applied."
+ f_module.check_mode = True
+ with pytest.raises(Exception) as ex:
+ self.module.get_rollback_preview_target(redfish_connection_mock, f_module)
+ assert ex.value.args[0] == "No changes found to be applied."
+ redfish_response_mock.json_data["Members"] = [
+ {"@odata.id": "uri/1", "Id": "Previous.1", "Name": "QLogic.1", "Version": "1.2"},
+ {"@odata.id": "uri/2", "Id": "Previous.2", "Name": "QLogic.2", "Version": "1.2"},
+ {"@odata.id": "uri/3", "Id": "Previous.3", "Name": "QLogic.3", "Version": "1.2"},
+ {"@odata.id": "uri/4", "Id": "Previous.4", "Name": "BIOS", "Version": "1.2"}
+ ]
+ with pytest.raises(Exception) as ex:
+ self.module.get_rollback_preview_target(redfish_connection_mock, f_module)
+ assert ex.value.args[0] == "Changes found to be applied."
+ f_module.check_mode = False
+ result = self.module.get_rollback_preview_target(redfish_connection_mock, f_module)
+ assert result[0] == ["uri/4"]
+ assert result[2] == "/redfish/v1/SimpleUpdate"
+
+ def test_get_job_status(self, redfish_connection_mock, redfish_response_mock, redfish_default_args, mocker):
+ redfish_default_args.update({"username": "user", "password": "pwd", "baseuri": "XX.XX.XX.XX", "Name": "BIOS",
+ "reboot_timeout": 900})
+ f_module = self.get_module_mock(params=redfish_default_args)
+ redfish_response_mock.json_data = {"JobState": "Completed", "JobType": "FirmwareUpdate",
+ "Name": "Firmware Rollback: Network", "PercentComplete": 100}
+ mocker.patch(MODULE_PATH + 'redfish_firmware_rollback.wait_for_redfish_job_complete',
+ return_value=(redfish_response_mock, ""))
+ mocker.patch(MODULE_PATH + 'redfish_firmware_rollback.strip_substr_dict',
+ return_value={"JobState": "Completed", "JobType": "FirmwareUpdate",
+ "Name": "Firmware Rollback: Network", "PercentComplete": 100})
+ result = self.module.get_job_status(redfish_connection_mock, f_module, ["JID_123456789"], job_wait=True)
+ assert result[0] == [{'JobState': 'Completed', 'JobType': 'FirmwareUpdate',
+ 'Name': 'Firmware Rollback: Network', 'PercentComplete': 100}]
+ assert result[1] == 0
+ redfish_response_mock.json_data = {"JobState": "Failed", "JobType": "FirmwareUpdate",
+ "Name": "Firmware Rollback: Network", "PercentComplete": 100}
+ mocker.patch(MODULE_PATH + 'redfish_firmware_rollback.wait_for_redfish_job_complete',
+ return_value=(redfish_response_mock, ""))
+ mocker.patch(MODULE_PATH + 'redfish_firmware_rollback.strip_substr_dict',
+ return_value={"JobState": "Failed", "JobType": "FirmwareUpdate",
+ "Name": "Firmware Rollback: Network", "PercentComplete": 100})
+ result = self.module.get_job_status(redfish_connection_mock, f_module, ["JID_123456789"], job_wait=True)
+ assert result[0] == [{'JobState': 'Failed', 'JobType': 'FirmwareUpdate',
+ 'Name': 'Firmware Rollback: Network', 'PercentComplete': 100}]
+ assert result[1] == 1
+
+ mocker.patch(MODULE_PATH + 'redfish_firmware_rollback.wait_for_redfish_job_complete',
+ return_value=(redfish_response_mock, "some error message"))
+ with pytest.raises(Exception) as ex:
+ self.module.get_job_status(redfish_connection_mock, f_module, ["JID_123456789"], job_wait=True)
+ assert ex.value.args[0] == "Task excited after waiting for 900 seconds. Check console for " \
+ "firmware rollback status."
+
+ def test_simple_update(self, redfish_connection_mock, redfish_response_mock, redfish_default_args, mocker):
+ mocker.patch(MODULE_PATH + 'redfish_firmware_rollback.time.sleep', return_value=None)
+ preview_uri, update_uri = ["/uri/1"], ["/uri/SimpleUpdate"]
+ redfish_response_mock.headers = {"Location": "/job/JID_123456789"}
+ result = self.module.simple_update(redfish_connection_mock, preview_uri, update_uri)
+ assert result == ["JID_123456789"]
+
+ def test_require_session(self, redfish_connection_mock, redfish_response_mock, redfish_default_args):
+ redfish_default_args.update({"username": "user", "password": "pwd", "baseuri": "XX.XX.XX.XX", "Name": "BIOS"})
+ f_module = self.get_module_mock(params=redfish_default_args)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"Id": 1}
+ redfish_response_mock.headers = {"X-Auth-Token": "token_key"}
+ result = self.module.require_session(redfish_connection_mock, f_module)
+ assert result[0] == 1
+ assert result[1] == "token_key"
+
+ @pytest.mark.parametrize("exc_type", [RuntimeError, URLError, SSLValidationError, ConnectionError, KeyError,
+ ImportError, ValueError, TypeError, IOError, AssertionError, OSError])
+ def test_main_rollback_exception_handling_case(self, exc_type, mocker, redfish_default_args,
+ redfish_connection_mock, redfish_response_mock):
+ redfish_default_args.update({"name": "BIOS"})
+ redfish_response_mock.status_code = 400
+ redfish_response_mock.success = False
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type not in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + 'redfish_firmware_rollback.get_rollback_preview_target',
+ side_effect=exc_type('test'))
+ else:
+ mocker.patch(MODULE_PATH + 'redfish_firmware_rollback.get_rollback_preview_target',
+ side_effect=exc_type(HTTPS_ADDRESS, 400, HTTP_ERROR_MSG,
+ {"accept-type": ACCESS_TYPE}, StringIO(json_str)))
+ if exc_type == HTTPError:
+ result = self._run_module(redfish_default_args)
+ assert result['failed'] is True
+ elif exc_type == URLError:
+ result = self._run_module(redfish_default_args)
+ assert result['unreachable'] is True
+ else:
+ result = self._run_module_with_fail_json(redfish_default_args)
+ assert result['failed'] is True
+ if exc_type == HTTPError:
+ assert 'error_info' in result
+ assert 'msg' in result
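Note: the new test_main_rollback_exception_handling_case pins down how exceptions map to module results: HTTPError yields a failed result carrying error_info, URLError is reported as unreachable, and every other exception fails the module. A hedged sketch of that contract, using an illustrative helper name that is not part of the module:

    from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError

    def classify_exception(exc):
        # HTTPError subclasses URLError, so it has to be checked first.
        if isinstance(exc, HTTPError):
            return {"failed": True, "msg": str(exc), "error_info": "JSON parsed from the HTTP body"}
        if isinstance(exc, URLError):
            return {"unreachable": True, "msg": str(exc.reason)}
        return {"failed": True, "msg": str(exc)}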
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_powerstate.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_powerstate.py
index 1477015a1..9c838febc 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_powerstate.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_powerstate.py
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 2.1.3
-# Copyright (C) 2020 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
+# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -15,7 +15,7 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import redfish_powerstate
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from io import StringIO
@@ -24,6 +24,7 @@ from ansible.module_utils._text import to_text
tarrget_error_msg = "The target device does not support the system reset" \
" feature using Redfish API."
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+HTTPS_ADDRESS = 'https://testhost.com'
@pytest.fixture
@@ -247,7 +248,7 @@ class TestRedfishPowerstate(FakeAnsibleModule):
"""failuere case when system does not supports and throws http error not found"""
f_module = self.get_module_mock()
redfish_connection_mock_for_powerstate.root_uri = "/redfish/v1/"
- redfish_connection_mock_for_powerstate.invoke_request.side_effect = HTTPError('http://testhost.com', 404,
+ redfish_connection_mock_for_powerstate.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 404,
json.dumps(tarrget_error_msg), {},
None)
with pytest.raises(Exception) as exc:
@@ -258,7 +259,7 @@ class TestRedfishPowerstate(FakeAnsibleModule):
"""failure case when system does not supports and throws http error 400 bad request"""
f_module = self.get_module_mock()
redfish_connection_mock_for_powerstate.root_uri = "/redfish/v1/"
- redfish_connection_mock_for_powerstate.invoke_request.side_effect = HTTPError('http://testhost.com', 400,
+ redfish_connection_mock_for_powerstate.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 400,
tarrget_error_msg,
{}, None)
with pytest.raises(Exception, match=tarrget_error_msg) as exc:
@@ -468,7 +469,7 @@ class TestRedfishPowerstate(FakeAnsibleModule):
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'redfish_powerstate.run_change_power_state',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type(HTTPS_ADDRESS, 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module_with_fail_json(redfish_default_args)
assert result['failed'] is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py
index 55fb3535f..40160edf5 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
#
-# Dell EMC OpenManage Ansible Modules
-# Version 5.3.0
+# Dell OpenManage Ansible Modules
+# Version 7.0.0
# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -15,13 +15,14 @@ __metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import redfish_storage_volume
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from io import StringIO
from ansible.module_utils._text import to_text
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+HTTPS_ADDRESS = 'https://testhost.com'
@pytest.fixture
@@ -51,24 +52,30 @@ class TestStorageVolume(FakeAnsibleModule):
"encryption_types": "NativeDriveEncryption",
"encrypted": False,
"volume_id": "volume_id", "oem": {"Dell": "DellAttributes"},
- "initialize_type": "Slow"
+ "initialize_type": "Slow",
+ "reboot_server": True
}]
@pytest.mark.parametrize("param", arg_list1)
def test_redfish_storage_volume_main_success_case_01(self, mocker, redfish_default_args, module_mock,
- redfish_connection_mock_for_storage_volume, param):
+ redfish_connection_mock_for_storage_volume, param,
+ storage_volume_base_uri):
mocker.patch(MODULE_PATH + 'redfish_storage_volume.validate_inputs')
mocker.patch(MODULE_PATH + 'redfish_storage_volume.fetch_storage_resource')
mocker.patch(MODULE_PATH + 'redfish_storage_volume.configure_raid_operation',
return_value={"msg": "Successfully submitted volume task.",
"task_uri": "task_uri",
"task_id": 1234})
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_apply_time_supported_and_reboot_required',
+ return_value=True)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_reboot')
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_job_tracking_required',
+ return_value=False)
redfish_default_args.update(param)
result = self._run_module(redfish_default_args)
assert result["changed"] is True
assert result['msg'] == "Successfully submitted volume task."
assert result["task"]["id"] == 1234
- assert result["task"]["uri"] == "task_uri"
arg_list2 = [
{"state": "absent"},
@@ -99,17 +106,21 @@ class TestStorageVolume(FakeAnsibleModule):
side_effect=exc_type('test'))
else:
mocker.patch(MODULE_PATH + 'redfish_storage_volume.configure_raid_operation',
- side_effect=exc_type('http://testhost.com', 400, 'http error message',
+ side_effect=exc_type(HTTPS_ADDRESS, 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
- result = self._run_module_with_fail_json(redfish_default_args)
+ result = self._run_module(redfish_default_args)
assert 'task' not in result
assert 'msg' in result
- assert result['failed'] is True
+ if exc_type != URLError:
+ assert result['failed'] is True
+ else:
+ assert result['unreachable'] is True
if exc_type == HTTPError:
assert 'error_info' in result
msg1 = "Either state or command should be provided to further actions."
msg2 = "When state is present, either controller_id or volume_id must be specified to perform further actions."
+ msg3 = "Either state or command should be provided to further actions."
@pytest.mark.parametrize("input",
[{"param": {"xyz": 123}, "msg": msg1}, {"param": {"state": "present"}, "msg": msg2}])
@@ -119,6 +130,13 @@ class TestStorageVolume(FakeAnsibleModule):
self.module.validate_inputs(f_module)
assert exc.value.args[0] == input["msg"]
+ @pytest.mark.parametrize("input",
+ [{"param": {"state": "present", "controller_id": "abc"}, "msg": msg3}])
+ def test_validate_inputs_skip_case(self, input):
+ f_module = self.get_module_mock(params=input["param"])
+ val = self.module.validate_inputs(f_module)
+ assert not val
+
def test_get_success_message_case_01(self):
action = "create"
message = self.module.get_success_message(action, "JobService/Jobs/JID_1234")
@@ -131,7 +149,7 @@ class TestStorageVolume(FakeAnsibleModule):
message = self.module.get_success_message(action, None)
assert message["msg"] == "Successfully submitted {0} volume task.".format(action)
- @pytest.mark.parametrize("input", [{"state": "present"}, {"state": "absent"}, {"command": "initialize"}])
+ @pytest.mark.parametrize("input", [{"state": "present"}, {"state": "absent"}, {"command": "initialize"}, {"command": None}])
def test_configure_raid_operation(self, input, redfish_connection_mock_for_storage_volume, mocker):
f_module = self.get_module_mock(params=input)
mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_volume_create_modify',
@@ -195,6 +213,7 @@ class TestStorageVolume(FakeAnsibleModule):
redfish_response_mock, storage_volume_base_uri):
redfish_response_mock.success = True
f_module = self.get_module_mock(params={"volume_id": "volume_id"})
+ f_module.check_mode = False
message = {"msg": "Successfully submitted delete volume task.", "task_uri": "JobService/Jobs",
"task_id": "JID_456"}
mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_volume_id_exists', return_value=redfish_response_mock)
@@ -210,6 +229,33 @@ class TestStorageVolume(FakeAnsibleModule):
self.module.perform_volume_deletion(f_module, redfish_connection_mock_for_storage_volume)
assert exc.value.args[0] == "'volume_id' option is a required property for deleting a volume."
+ def test_perform_volume_deletion_check_mode_case(self, mocker, redfish_connection_mock_for_storage_volume,
+ redfish_response_mock, storage_volume_base_uri):
+ redfish_response_mock.success = True
+ f_module = self.get_module_mock(params={"volume_id": "volume_id"})
+ f_module.check_mode = True
+ message = {"msg": "Changes found to be applied.", "task_uri": "JobService/Jobs"}
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_volume_id_exists', return_value=redfish_response_mock)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_storage_volume_action',
+ return_value=redfish_response_mock)
+ with pytest.raises(Exception) as exc:
+ self.module.perform_volume_deletion(f_module, redfish_connection_mock_for_storage_volume)
+ assert exc.value.args[0] == "Changes found to be applied."
+
+ def test_perform_volume_deletion_check_mode_failure_case(self, mocker, redfish_connection_mock_for_storage_volume,
+ redfish_response_mock, storage_volume_base_uri):
+ redfish_response_mock.code = 404
+ redfish_response_mock.success = False
+ f_module = self.get_module_mock(params={"volume_id": "volume_id"})
+ f_module.check_mode = True
+ message = {"msg": "No changes found to be applied.", "task_uri": "JobService/Jobs"}
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_volume_id_exists', return_value=redfish_response_mock)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_storage_volume_action',
+ return_value=redfish_response_mock)
+ with pytest.raises(Exception) as exc:
+ self.module.perform_volume_deletion(f_module, redfish_connection_mock_for_storage_volume)
+ assert exc.value.args[0] == "No changes found to be applied."
+
def test_perform_volume_create_modify_success_case_01(self, mocker, storage_volume_base_uri,
redfish_connection_mock_for_storage_volume):
f_module = self.get_module_mock(params={"volume_id": "volume_id", "controller_id": "controller_id"})
@@ -238,6 +284,21 @@ class TestStorageVolume(FakeAnsibleModule):
assert message["msg"] == "Successfully submitted modify volume task."
assert message["task_id"] == "JID_123"
+ def test_perform_volume_create_modify_success_case_03(self, mocker, storage_volume_base_uri,
+ redfish_connection_mock_for_storage_volume,
+ redfish_response_mock):
+ f_module = self.get_module_mock(params={"volume_id": "volume_id"})
+ message = {"msg": "Successfully submitted modify volume task.", "task_uri": "JobService/Jobs",
+ "task_id": "JID_123"}
+ redfish_response_mock.success = False
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_volume_id_exists', return_value=redfish_response_mock)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.volume_payload', return_value={"payload": "value"})
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_storage_volume_action', return_value=message)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_mode_validation', return_value=None)
+ message = self.module.perform_volume_create_modify(f_module, redfish_connection_mock_for_storage_volume)
+ assert message["msg"] == "Successfully submitted modify volume task."
+ assert message["task_id"] == "JID_123"
+
def test_perform_volume_create_modify_failure_case_01(self, mocker, storage_volume_base_uri,
redfish_connection_mock_for_storage_volume,
redfish_response_mock):
@@ -264,7 +325,7 @@ class TestStorageVolume(FakeAnsibleModule):
def test_perform_storage_volume_action_exception_case(self, redfish_response_mock,
redfish_connection_mock_for_storage_volume):
redfish_response_mock.headers.update({"Location": "JobService/Jobs/JID_123"})
- redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError('http://testhost.com', 400,
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 400,
'', {}, None)
with pytest.raises(HTTPError) as ex:
self.module.perform_storage_volume_action("POST", "uri", redfish_connection_mock_for_storage_volume,
@@ -341,7 +402,7 @@ class TestStorageVolume(FakeAnsibleModule):
redfish_connection_mock_for_storage_volume,
redfish_response_mock):
f_module = self.get_module_mock(params={"controller_id": "1234"})
- redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError('http://testhost.com',
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS,
404,
"Specified Controller 123 does"
" not exist in the System.",
@@ -359,7 +420,7 @@ class TestStorageVolume(FakeAnsibleModule):
redfish_response_mock):
f_module = self.get_module_mock(params={"controller_id": "1234"})
msg = "http error"
- redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError('http://testhost.com', 400,
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 400,
msg, {}, None)
with pytest.raises(Exception, match=msg) as exc:
self.module.check_specified_identifier_exists_in_the_system(f_module,
@@ -389,7 +450,7 @@ class TestStorageVolume(FakeAnsibleModule):
f_module = self.get_module_mock(params={"controller_id": "RAID.Mezzanine.1C-1",
"drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"]})
val = self.module.check_physical_disk_exists(f_module, drive)
- assert val is True
+ assert val
def test_check_physical_disk_exists_success_case_02(self):
drive = [
@@ -400,7 +461,7 @@ class TestStorageVolume(FakeAnsibleModule):
]
f_module = self.get_module_mock(params={"controller_id": "RAID.Mezzanine.1C-1", "drives": []})
val = self.module.check_physical_disk_exists(f_module, drive)
- assert val is True
+ assert val
def test_check_physical_disk_exists_error_case_01(self):
drive = [
@@ -431,9 +492,10 @@ class TestStorageVolume(FakeAnsibleModule):
"block_size_bytes": 512,
"encryption_types": "NativeDriveEncryption",
"encrypted": True,
- "volume_type": "NonRedundant",
+ "raid_type": "RAID0",
"name": "VD1",
"optimum_io_size_bytes": 65536,
+ "apply_time": "Immediate",
"oem": {"Dell": {"DellVirtualDisk": {"BusProtocol": "SAS", "Cachecade": "NonCachecadeVD",
"DiskCachePolicy": "Disabled",
"LockStatus": "Unlocked",
@@ -446,7 +508,7 @@ class TestStorageVolume(FakeAnsibleModule):
payload = self.module.volume_payload(f_module)
assert payload["Drives"][0]["@odata.id"] == "/redfish/v1/Systems/System.Embedded.1/Storage/" \
"Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
- assert payload["VolumeType"] == "NonRedundant"
+ assert payload["RAIDType"] == "RAID0"
assert payload["Name"] == "VD1"
assert payload["BlockSizeBytes"] == 512
assert payload["CapacityBytes"] == 299439751168
@@ -454,15 +516,16 @@ class TestStorageVolume(FakeAnsibleModule):
assert payload["Encrypted"] is True
assert payload["EncryptionTypes"] == ["NativeDriveEncryption"]
assert payload["Dell"]["DellVirtualDisk"]["ReadCachePolicy"] == "NoReadAhead"
+ assert payload["@Redfish.OperationApplyTime"] == "Immediate"
def test_volume_payload_case_02(self):
param = {"block_size_bytes": 512,
- "volume_type": "NonRedundant",
+ "raid_type": "RAID0",
"name": "VD1",
"optimum_io_size_bytes": 65536}
f_module = self.get_module_mock(params=param)
payload = self.module.volume_payload(f_module)
- assert payload["VolumeType"] == "NonRedundant"
+ assert payload["RAIDType"] == "RAID0"
assert payload["Name"] == "VD1"
assert payload["BlockSizeBytes"] == 512
assert payload["OptimumIOSizeBytes"] == 65536
@@ -475,7 +538,7 @@ class TestStorageVolume(FakeAnsibleModule):
"block_size_bytes": 512,
"encryption_types": "NativeDriveEncryption",
"encrypted": False,
- "volume_type": "NonRedundant",
+ "raid_type": "RAID0",
"name": "VD1",
"optimum_io_size_bytes": 65536,
"oem": {"Dell": {"DellVirtualDisk": {"BusProtocol": "SAS", "Cachecade": "NonCachecadeVD",
@@ -490,7 +553,7 @@ class TestStorageVolume(FakeAnsibleModule):
payload = self.module.volume_payload(f_module)
assert payload["Drives"][0]["@odata.id"] == "/redfish/v1/Systems/System.Embedded.1/" \
"Storage/Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
- assert payload["VolumeType"] == "NonRedundant"
+ assert payload["RAIDType"] == "RAID0"
assert payload["Name"] == "VD1"
assert payload["BlockSizeBytes"] == 512
assert payload["CapacityBytes"] == 299439751168
@@ -499,6 +562,109 @@ class TestStorageVolume(FakeAnsibleModule):
assert payload["EncryptionTypes"] == ["NativeDriveEncryption"]
assert payload["Dell"]["DellVirtualDisk"]["ReadCachePolicy"] == "NoReadAhead"
+ def test_volume_payload_case_04(self, storage_volume_base_uri):
+ param = {
+ "drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"],
+ "capacity_bytes": 299439751168,
+ "block_size_bytes": 512,
+ "encryption_types": "NativeDriveEncryption",
+ "encrypted": True,
+ "volume_type": "NonRedundant",
+ "name": "VD1",
+ "optimum_io_size_bytes": 65536,
+ "oem": {"Dell": {"DellVirtualDisk": {"BusProtocol": "SAS", "Cachecade": "NonCachecadeVD",
+ "DiskCachePolicy": "Disabled",
+ "LockStatus": "Unlocked",
+ "MediaType": "HardDiskDrive",
+ "ReadCachePolicy": "NoReadAhead",
+ "SpanDepth": 1,
+ "SpanLength": 2,
+ "WriteCachePolicy": "WriteThrough"}}}}
+ f_module = self.get_module_mock(params=param)
+ payload = self.module.volume_payload(f_module)
+ assert payload["Drives"][0]["@odata.id"] == "/redfish/v1/Systems/System.Embedded.1/Storage/" \
+ "Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
+ assert payload["RAIDType"] == "RAID0"
+ assert payload["Name"] == "VD1"
+ assert payload["BlockSizeBytes"] == 512
+ assert payload["CapacityBytes"] == 299439751168
+ assert payload["OptimumIOSizeBytes"] == 65536
+ assert payload["Encrypted"] is True
+ assert payload["EncryptionTypes"] == ["NativeDriveEncryption"]
+ assert payload["Dell"]["DellVirtualDisk"]["ReadCachePolicy"] == "NoReadAhead"
+
+ def test_volume_payload_case_05(self, storage_volume_base_uri):
+ param = {
+ "drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1",
+ "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Mezzanine.1C-1",
+ "Disk.Bay.0:Enclosure.Internal.0-2:RAID.Mezzanine.1C-1",
+ "Disk.Bay.0:Enclosure.Internal.0-3:RAID.Mezzanine.1C-1"],
+ "capacity_bytes": 299439751168,
+ "block_size_bytes": 512,
+ "encryption_types": "NativeDriveEncryption",
+ "encrypted": True,
+ "raid_type": "RAID6",
+ "name": "VD1",
+ "optimum_io_size_bytes": 65536,
+ "oem": {"Dell": {"DellVirtualDisk": {"BusProtocol": "SAS", "Cachecade": "NonCachecadeVD",
+ "DiskCachePolicy": "Disabled",
+ "LockStatus": "Unlocked",
+ "MediaType": "HardDiskDrive",
+ "ReadCachePolicy": "NoReadAhead",
+ "SpanDepth": 1,
+ "SpanLength": 2,
+ "WriteCachePolicy": "WriteThrough"}}}}
+ f_module = self.get_module_mock(params=param)
+ payload = self.module.volume_payload(f_module)
+ assert payload["Drives"][0]["@odata.id"] == "/redfish/v1/Systems/System.Embedded.1/Storage/" \
+ "Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
+ assert payload["RAIDType"] == "RAID6"
+ assert payload["Name"] == "VD1"
+ assert payload["BlockSizeBytes"] == 512
+ assert payload["CapacityBytes"] == 299439751168
+ assert payload["OptimumIOSizeBytes"] == 65536
+ assert payload["Encrypted"] is True
+ assert payload["EncryptionTypes"] == ["NativeDriveEncryption"]
+ assert payload["Dell"]["DellVirtualDisk"]["ReadCachePolicy"] == "NoReadAhead"
+
+ def test_volume_payload_case_06(self, storage_volume_base_uri):
+ param = {
+ "drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1",
+ "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Mezzanine.1C-1",
+ "Disk.Bay.0:Enclosure.Internal.0-2:RAID.Mezzanine.1C-1",
+ "Disk.Bay.0:Enclosure.Internal.0-3:RAID.Mezzanine.1C-1",
+ "Disk.Bay.0:Enclosure.Internal.0-4:RAID.Mezzanine.1C-1",
+ "Disk.Bay.0:Enclosure.Internal.0-5:RAID.Mezzanine.1C-1",
+ "Disk.Bay.0:Enclosure.Internal.0-6:RAID.Mezzanine.1C-1",
+ "Disk.Bay.0:Enclosure.Internal.0-7:RAID.Mezzanine.1C-1"],
+ "capacity_bytes": 299439751168,
+ "block_size_bytes": 512,
+ "encryption_types": "NativeDriveEncryption",
+ "encrypted": True,
+ "raid_type": "RAID60",
+ "name": "VD1",
+ "optimum_io_size_bytes": 65536,
+ "oem": {"Dell": {"DellVirtualDisk": {"BusProtocol": "SAS", "Cachecade": "NonCachecadeVD",
+ "DiskCachePolicy": "Disabled",
+ "LockStatus": "Unlocked",
+ "MediaType": "HardDiskDrive",
+ "ReadCachePolicy": "NoReadAhead",
+ "SpanDepth": 1,
+ "SpanLength": 2,
+ "WriteCachePolicy": "WriteThrough"}}}}
+ f_module = self.get_module_mock(params=param)
+ payload = self.module.volume_payload(f_module)
+ assert payload["Drives"][0]["@odata.id"] == "/redfish/v1/Systems/System.Embedded.1/Storage/" \
+ "Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
+ assert payload["RAIDType"] == "RAID60"
+ assert payload["Name"] == "VD1"
+ assert payload["BlockSizeBytes"] == 512
+ assert payload["CapacityBytes"] == 299439751168
+ assert payload["OptimumIOSizeBytes"] == 65536
+ assert payload["Encrypted"] is True
+ assert payload["EncryptionTypes"] == ["NativeDriveEncryption"]
+ assert payload["Dell"]["DellVirtualDisk"]["ReadCachePolicy"] == "NoReadAhead"
+
def test_fetch_storage_resource_success_case_01(self, redfish_connection_mock_for_storage_volume,
redfish_response_mock):
f_module = self.get_module_mock()
@@ -551,7 +717,7 @@ class TestStorageVolume(FakeAnsibleModule):
f_module = self.get_module_mock()
msg = "Target out-of-band controller does not support storage feature using Redfish API."
redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
- redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError('http://testhost.com', 404,
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 404,
json.dumps(msg), {}, None)
with pytest.raises(Exception) as exc:
self.module.fetch_storage_resource(f_module, redfish_connection_mock_for_storage_volume)
@@ -561,7 +727,7 @@ class TestStorageVolume(FakeAnsibleModule):
f_module = self.get_module_mock()
msg = "http error"
redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
- redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError('http://testhost.com', 400,
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 400,
msg, {}, None)
with pytest.raises(Exception, match=msg) as exc:
self.module.fetch_storage_resource(f_module, redfish_connection_mock_for_storage_volume)
@@ -579,7 +745,7 @@ class TestStorageVolume(FakeAnsibleModule):
redfish_response_mock, storage_volume_base_uri):
param = {"drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Integrated.1-1"],
"capacity_bytes": 214748364800, "block_size_bytes": 512, "encryption_types": "NativeDriveEncryption",
- "encrypted": False, "volume_type": "NonRedundant", "optimum_io_size_bytes": 65536}
+ "encrypted": False, "raid_type": "RAID0", "optimum_io_size_bytes": 65536}
f_module = self.get_module_mock(params=param)
f_module.check_mode = True
with pytest.raises(Exception) as exc:
@@ -598,7 +764,7 @@ class TestStorageVolume(FakeAnsibleModule):
"Members": [{"@odata.id": "/redfish/v1/Systems/System.Embedded.1/Storage/"
"RAID.Integrated.1-1/Volumes/Disk.Virtual.0:RAID.Integrated.1-1"}],
"Name": "VD0", "BlockSizeBytes": 512, "CapacityBytes": 214748364800, "Encrypted": False,
- "EncryptionTypes": ["NativeDriveEncryption"], "OptimumIOSizeBytes": 65536, "VolumeType": "NonRedundant",
+ "EncryptionTypes": ["NativeDriveEncryption"], "OptimumIOSizeBytes": 65536, "RAIDType": "RAID0",
"Links": {"Drives": [{"@odata.id": "Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Integrated.1-1"}]}}
param.update({"name": "VD0"})
f_module = self.get_module_mock(params=param)
@@ -608,3 +774,358 @@ class TestStorageVolume(FakeAnsibleModule):
f_module, redfish_connection_mock_for_storage_volume, "create",
"/redfish/v1/Systems/System.Embedded.1/Storage/RAID.Integrated.1-1/Volumes/")
assert exc.value.args[0] == "No changes found to be applied."
+
+ def test_check_mode_validation_01(self, redfish_connection_mock_for_storage_volume,
+ redfish_response_mock, storage_volume_base_uri):
+ param1 = {"volume_id": None, 'name': None}
+ f_module = self.get_module_mock(params=param1)
+ f_module.check_mode = False
+ result = self.module.check_mode_validation(f_module,
+ redfish_connection_mock_for_storage_volume,
+ "",
+ "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.Integrated.1-1/Volumes/")
+ assert not result
+
+ def test_check_raid_type_supported_success_case01(self, mocker, redfish_response_mock, storage_volume_base_uri,
+ redfish_connection_mock_for_storage_volume):
+ param = {"raid_type": "RAID0", "controller_id": "controller_id"}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {'StorageControllers': [{'SupportedRAIDTypes': ['RAID0', 'RAID6', 'RAID60']}]}
+ self.module.check_raid_type_supported(f_module,
+ redfish_connection_mock_for_storage_volume)
+
+ def test_check_raid_type_supported_success_case02(self, mocker, redfish_response_mock, storage_volume_base_uri,
+ redfish_connection_mock_for_storage_volume):
+ param = {"volume_type": "NonRedundant", "controller_id": "controller_id"}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {'StorageControllers': [{'SupportedRAIDTypes': ['RAID0', 'RAID6', 'RAID60']}]}
+ self.module.check_raid_type_supported(f_module,
+ redfish_connection_mock_for_storage_volume)
+
+ def test_check_raid_type_supported_success_case03(self, mocker, redfish_response_mock, storage_volume_base_uri,
+ redfish_connection_mock_for_storage_volume):
+ param = {"raid_type": "RAID6", "controller_id": "controller_id"}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {'StorageControllers': [{'SupportedRAIDTypes': ['RAID0', 'RAID6', 'RAID60']}]}
+ self.module.check_raid_type_supported(f_module,
+ redfish_connection_mock_for_storage_volume)
+
+ def test_check_raid_type_supported_success_case04(self, mocker, redfish_response_mock, storage_volume_base_uri,
+ redfish_connection_mock_for_storage_volume):
+ param = {"raid_type": "RAID60", "controller_id": "controller_id"}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {'StorageControllers': [{'SupportedRAIDTypes': ['RAID0', 'RAID6', 'RAID60']}]}
+ self.module.check_raid_type_supported(f_module,
+ redfish_connection_mock_for_storage_volume)
+
+ def test_check_raid_type_supported_failure_case(self, mocker, redfish_response_mock, storage_volume_base_uri,
+ redfish_connection_mock_for_storage_volume):
+ param = {"raid_type": "RAID9", "controller_id": "controller_id"}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {'StorageControllers': [{'SupportedRAIDTypes': ['RAID0', 'RAID6', 'RAID60']}]}
+ with pytest.raises(Exception) as exc:
+ self.module.check_raid_type_supported(f_module,
+ redfish_connection_mock_for_storage_volume)
+ assert exc.value.args[0] == "RAID Type RAID9 is not supported."
+
+ def test_check_raid_type_supported_exception_case(self, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri):
+ param = {"volume_type": "NonRedundant", "controller_id": "controller_id"}
+ f_module = self.get_module_mock(params=param)
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 400,
+ '', {}, None)
+ with pytest.raises(HTTPError) as ex:
+ self.module.check_raid_type_supported(f_module, redfish_connection_mock_for_storage_volume)
+
+ def test_get_apply_time_success_case_01(self, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri):
+ param = {"controller_id": "controller_id", "apply_time": "Immediate"}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"@Redfish.OperationApplyTimeSupport": {"SupportedValues": ["Immediate"]}}
+ self.module.get_apply_time(f_module,
+ redfish_connection_mock_for_storage_volume,
+ controller_id="controller_id")
+
+ def test_get_apply_time_success_case_02(self, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri):
+ param = {"controller_id": "controller_id"}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"@Redfish.OperationApplyTimeSupport": {"SupportedValues": ["Immediate"]}}
+ self.module.get_apply_time(f_module,
+ redfish_connection_mock_for_storage_volume,
+ controller_id="controller_id")
+
+ def test_get_apply_time_supported_failure_case(self, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri):
+ param = {"controller_id": "controller_id", "apply_time": "Immediate"}
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ redfish_response_mock.json_data = {"@Redfish.OperationApplyTimeSupport": {"SupportedValues": ["OnReset"]}}
+ with pytest.raises(Exception) as exc:
+ self.module.get_apply_time(f_module,
+ redfish_connection_mock_for_storage_volume,
+ controller_id="controller_id")
+ assert exc.value.args[0] == "Apply time Immediate \
+is not supported. The supported values are ['OnReset']. Enter the valid values and retry the operation."
+
+ def test_get_apply_time_supported_exception_case(self, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri):
+ param = {"controller_id": "controller_id", "apply_time": "Immediate"}
+ f_module = self.get_module_mock(params=param)
+ redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 400,
+ '', {}, None)
+ with pytest.raises(HTTPError) as ex:
+ self.module.get_apply_time(f_module, redfish_connection_mock_for_storage_volume,
+ controller_id="controller_id")
+
+ def test_check_apply_time_supported_and_reboot_required_success_case01(self, mocker,
+ redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri):
+ param = {"reboot_server": True}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.get_apply_time',
+ return_value="OnReset")
+ apply_time = self.module.get_apply_time(f_module, redfish_connection_mock_for_storage_volume)
+ val = self.module.check_apply_time_supported_and_reboot_required(f_module,
+ redfish_connection_mock_for_storage_volume,
+ controller_id="controller_id")
+ assert val
+
+ def test_check_apply_time_supported_and_reboot_required_success_case02(self, mocker,
+ redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri):
+ param = {"reboot_server": False}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.get_apply_time',
+ return_value="Immediate")
+ apply_time = self.module.get_apply_time(f_module, redfish_connection_mock_for_storage_volume)
+ val = self.module.check_apply_time_supported_and_reboot_required(f_module,
+ redfish_connection_mock_for_storage_volume,
+ controller_id="controller_id")
+ assert not val
+
+ def test_check_job_tracking_required_success_case01(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri):
+ param = {"job_wait": True}
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.get_apply_time',
+ return_value="OnReset")
+ f_module = self.get_module_mock(params=param)
+ redfish_response_mock.success = True
+ val = self.module.check_job_tracking_required(f_module,
+ redfish_connection_mock_for_storage_volume,
+ reboot_required=False,
+ controller_id="controller_id")
+ assert not val
+
+ def test_check_job_tracking_required_success_case02(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri):
+ param = {"job_wait": True}
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.get_apply_time',
+ return_value="Immediate")
+ f_module = self.get_module_mock(params=param)
+ val = self.module.check_job_tracking_required(f_module,
+ redfish_connection_mock_for_storage_volume,
+ reboot_required=True,
+ controller_id="controller_id")
+ assert val
+
+ def test_check_job_tracking_required_success_case03(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri):
+ param = {"job_wait": False}
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.get_apply_time',
+ return_value="Immediate")
+ f_module = self.get_module_mock(params=param)
+ val = self.module.check_job_tracking_required(f_module,
+ redfish_connection_mock_for_storage_volume,
+ reboot_required=True,
+ controller_id=None)
+ assert not val
+
+ def test_perform_reboot_timeout_case(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri,
+ redfish_default_args):
+ param = {"force_reboot": False}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_redfish_reboot_job",
+ return_value=({"JobState": "Completed", "Id": "JID_123456789"}, True, ""))
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_job_completion",
+ return_value=("", "The job is not complete after 2 seconds."))
+ with pytest.raises(Exception) as exc:
+ self.module.perform_reboot(f_module, redfish_connection_mock_for_storage_volume)
+ assert exc.value.args[0] == "The job is not complete after 2 seconds."
+
+ def test_perform_reboot_success_case01(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri,
+ redfish_default_args):
+ param = {"force_reboot": False}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_redfish_reboot_job",
+ return_value=({"JobState": "Completed", "Id": "JID_123456789"}, True, ""))
+ redfish_response_mock.json_data = {"JobState": "Completed"}
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_job_completion",
+ return_value=(redfish_response_mock, "The job is completed."))
+ val = self.module.perform_reboot(f_module, redfish_connection_mock_for_storage_volume)
+ assert not val
+
+ def test_perform_reboot_success_case02(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri,
+ redfish_default_args):
+ param = {"force_reboot": True}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_redfish_reboot_job",
+ return_value=({"JobState": "Failed", "Id": "JID_123456789"}, True, ""))
+ redfish_response_mock.json_data = {"JobState": "Failed"}
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_job_completion",
+ return_value=(redfish_response_mock, "The job is failed."))
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.perform_force_reboot",
+ return_value=True)
+ val = self.module.perform_reboot(f_module, redfish_connection_mock_for_storage_volume)
+ assert not val
+
+ def test_perform_reboot_without_output_case(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri,
+ redfish_default_args):
+ param = {"force_reboot": False}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_redfish_reboot_job",
+ return_value=("", False, ""))
+
+ val = self.module.perform_reboot(f_module, redfish_connection_mock_for_storage_volume)
+ assert not val
+
+ def test_perform_force_reboot_timeout_case(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri,
+ redfish_default_args):
+ param = {"force_reboot": False}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_redfish_reboot_job",
+ return_value=({"JobState": "Completed", "Id": "JID_123456789"}, True, ""))
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_job_completion",
+ return_value=("", "The job is not complete after 2 seconds."))
+ with pytest.raises(Exception) as exc:
+ self.module.perform_force_reboot(f_module, redfish_connection_mock_for_storage_volume)
+ assert exc.value.args[0] == "The job is not complete after 2 seconds."
+
+ def test_perform_force_reboot_success_case01(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri,
+ redfish_default_args):
+ param = {"force_reboot": False}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_redfish_reboot_job",
+ return_value=({"JobState": "Completed", "Id": "JID_123456789"}, True, ""))
+ redfish_response_mock.json_data = {"JobState": "Completed"}
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_job_completion",
+ return_value=(redfish_response_mock, "The job is completed."))
+ val = self.module.perform_force_reboot(f_module, redfish_connection_mock_for_storage_volume)
+ assert not val
+
+    def test_perform_force_reboot_failure_case(self, mocker, redfish_response_mock,
+                                               redfish_connection_mock_for_storage_volume,
+                                               storage_volume_base_uri,
+                                               redfish_default_args):
+ param = {"force_reboot": True}
+ f_module = self.get_module_mock(params=param)
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_redfish_reboot_job",
+ return_value=({"JobState": "Completed", "Id": "JID_123456789"}, True, ""))
+ redfish_response_mock.json_data = {"JobState": "Failed"}
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_job_completion",
+ return_value=(redfish_response_mock, "The job is completed."))
+ with pytest.raises(Exception) as exc:
+ self.module.perform_force_reboot(f_module, redfish_connection_mock_for_storage_volume)
+ assert exc.value.args[0] == "Failed to reboot the server."
+
+ def test_perform_force_reboot_without_output_case(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri,
+ redfish_default_args):
+ f_module = self.get_module_mock()
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_redfish_reboot_job",
+ return_value=("", False, ""))
+ val = self.module.perform_force_reboot(f_module, redfish_connection_mock_for_storage_volume)
+ assert not val
+
+ def test_track_job_success_case01(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri,
+ redfish_default_args):
+ job_id = "JID_123456789"
+ job_url = "/redfish/v1/Managers/iDRAC.Embedded.1/JID_123456789"
+ f_module = self.get_module_mock()
+ redfish_response_mock.json_data = {"JobState": "Scheduled"}
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_job_completion",
+ return_value=(redfish_response_mock, "The job is scheduled."))
+ with pytest.raises(Exception) as exc:
+ self.module.track_job(f_module, redfish_connection_mock_for_storage_volume, job_id, job_url)
+ assert exc.value.args[0] == "The job is successfully submitted."
+
+ def test_track_job_success_case02(self, mocker,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri,
+ redfish_default_args):
+ job_id = "JID_123456789"
+ job_url = "/redfish/v1/Managers/iDRAC.Embedded.1/JID_123456789"
+ f_module = self.get_module_mock()
+ redfish_response_mock = {}
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_job_completion",
+ return_value=(redfish_response_mock, "The job has no response."))
+ with pytest.raises(Exception) as exc:
+ self.module.track_job(f_module, redfish_connection_mock_for_storage_volume, job_id, job_url)
+ assert exc.value.args[0] == "The job has no response."
+
+ def test_track_job_success_case03(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri,
+ redfish_default_args):
+ job_id = "JID_123456789"
+ job_url = "/redfish/v1/Managers/iDRAC.Embedded.1/JID_123456789"
+ f_module = self.get_module_mock()
+ redfish_response_mock.json_data = {"JobState": "Failed"}
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_job_completion",
+ return_value=(redfish_response_mock, "The job is failed."))
+ with pytest.raises(Exception) as exc:
+ self.module.track_job(f_module, redfish_connection_mock_for_storage_volume, job_id, job_url)
+ assert exc.value.args[0] == "Unable to complete the task initiated for creating the storage volume."
+
+ def test_track_job_success_case04(self, mocker, redfish_response_mock,
+ redfish_connection_mock_for_storage_volume,
+ storage_volume_base_uri,
+ redfish_default_args):
+ job_id = "JID_123456789"
+ job_url = "/redfish/v1/Managers/iDRAC.Embedded.1/JID_123456789"
+ f_module = self.get_module_mock()
+ redfish_response_mock.json_data = {"JobState": "Success"}
+ mocker.patch(MODULE_PATH + "redfish_storage_volume.wait_for_job_completion",
+ return_value=(redfish_response_mock, "The job is failed."))
+ with pytest.raises(Exception) as exc:
+ self.module.track_job(f_module, redfish_connection_mock_for_storage_volume, job_id, job_url)
+ assert exc.value.args[0] == "The job is successfully completed."
+
+ def test_validate_negative_job_time_out(self, redfish_default_args):
+ redfish_default_args.update({"job_wait": True, "job_wait_timeout": -5})
+ f_module = self.get_module_mock(params=redfish_default_args)
+ with pytest.raises(Exception) as ex:
+ self.module.validate_negative_job_time_out(f_module)
+ assert ex.value.args[0] == "The parameter job_wait_timeout value cannot be negative or zero."
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/utils.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/utils.py
new file mode 100644
index 000000000..bd264f6b3
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/utils.py
@@ -0,0 +1,55 @@
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import json
+
+import unittest
+import tempfile
+from unittest.mock import patch
+from ansible.module_utils import basic
+from ansible.module_utils.common.text.converters import to_bytes
+
+
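+# Serialize the given arguments and store them in basic._ANSIBLE_ARGS so that
+# AnsibleModule instances created by the module under test read them as input.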
+def set_module_args(args):
+ args['_ansible_remote_tmp'] = tempfile.gettempdir()
+ args['_ansible_keep_remote_files'] = False
+
+ args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
+ basic._ANSIBLE_ARGS = to_bytes(args)
+
+
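+# Exceptions raised instead of exiting, so tests can capture exit_json/fail_json payloads.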
+class AnsibleExitJson(Exception):
+ pass
+
+
+class AnsibleFailJson(Exception):
+ pass
+
+
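+# Replacements for AnsibleModule.exit_json/fail_json that raise the exceptions
+# above with the result payload instead of terminating the process.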
+def exit_json(*args, **kwargs):
+ if "changed" not in kwargs:
+ kwargs["changed"] = False
+ raise AnsibleExitJson(kwargs)
+
+
+def fail_json(*args, **kwargs):
+ kwargs['failed'] = True
+ raise AnsibleFailJson(kwargs)
+
+
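+# Base test case that patches the AnsibleModule exit hooks and time.sleep for every test.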
+class ModuleTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.mock_module = patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
+ self.mock_module.start()
+ self.mock_sleep = patch('time.sleep')
+ self.mock_sleep.start()
+ set_module_args({})
+ self.addCleanup(self.mock_module.stop)
+ self.addCleanup(self.mock_sleep.stop)
diff --git a/ansible_collections/dellemc/openmanage/tests/requirements.txt b/ansible_collections/dellemc/openmanage/tests/unit/requirements.txt
index 3ea8227f8..324a0eebc 100644
--- a/ansible_collections/dellemc/openmanage/tests/requirements.txt
+++ b/ansible_collections/dellemc/openmanage/tests/unit/requirements.txt
@@ -5,5 +5,5 @@ mock
pytest-mock
pytest-cov
# pytest-ansible==2.0.1
-coverage==4.5.4
+coverage
netaddr>=0.7.19