summaryrefslogtreecommitdiffstats
path: root/ansible_collections/dellemc
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-06-05 16:18:34 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-06-05 16:18:34 +0000
commit3667197efb7b18ec842efd504785965911f8ac4b (patch)
tree0b986a4bc6879d080b100666a97cdabbc9ca1f28 /ansible_collections/dellemc
parentAdding upstream version 9.5.1+dfsg. (diff)
downloadansible-3667197efb7b18ec842efd504785965911f8ac4b.tar.xz
ansible-3667197efb7b18ec842efd504785965911f8ac4b.zip
Adding upstream version 10.0.0+dfsg.upstream/10.0.0+dfsg
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ansible_collections/dellemc')
-rw-r--r--ansible_collections/dellemc/openmanage/.ansible-lint-ignore18
-rw-r--r--ansible_collections/dellemc/openmanage/.github/workflows/ansible-test.yml29
-rw-r--r--ansible_collections/dellemc/openmanage/CHANGELOG.rst112
-rw-r--r--ansible_collections/dellemc/openmanage/FILES.json1333
-rw-r--r--ansible_collections/dellemc/openmanage/MANIFEST.json4
-rw-r--r--ansible_collections/dellemc/openmanage/README.md10
-rw-r--r--ansible_collections/dellemc/openmanage/changelogs/changelog.yaml105
-rw-r--r--ansible_collections/dellemc/openmanage/docs/ATTRIBUTION.md27
-rw-r--r--ansible_collections/dellemc/openmanage/docs/EXECUTION_ENVIRONMENT.md1
-rw-r--r--ansible_collections/dellemc/openmanage/docs/README.md189
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_diagnostics.rst390
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_reset.rst134
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_session.rst157
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_storage_volume.rst310
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_application_console_preferences.rst49
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_device_local_access_configuration.rst56
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_device_quick_deploy.rst59
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_devices.rst35
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/redfish_storage_volume.rst2
-rw-r--r--ansible_collections/dellemc/openmanage/meta/runtime.yml9
-rw-r--r--ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_diagnostics.yml97
-rw-r--r--ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_reset.yml64
-rw-r--r--ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_session.yml21
-rw-r--r--ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_storage_volume.yml (renamed from ansible_collections/dellemc/openmanage/playbooks/idrac/dellemc_idrac_storage_volume.yml)12
-rw-r--r--ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml5
-rw-r--r--ansible_collections/dellemc/openmanage/playbooks/roles/idrac_user/idrac_user.yml9
-rw-r--r--ansible_collections/dellemc/openmanage/playbooks/roles/vars_files/user.yml15
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/README.md3
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/module_utils/dellemc_idrac.py7
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/module_utils/session_utils.py322
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/module_utils/utils.py23
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/dellemc_idrac_storage_volume.py5
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_diagnostics.py874
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_reset.py515
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_server_config_profile.py21
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_session.py425
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_storage_volume.py924
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_application_console_preferences.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_device_local_access_configuration.py14
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_device_quick_deploy.py39
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_devices.py18
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/redfish_storage_volume.py198
-rw-r--r--ansible_collections/dellemc/openmanage/requirements.txt1
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/clear_pending_attributes/prepare.yml9
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window/converge.yml2
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true/converge.yml24
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false/converge.yml1
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart/converge.yml55
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart/converge.yml52
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/converge.yml27
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/molecule.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml10
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml16
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id/converge.yml11
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CA/converge.yml12
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CSC/converge.yml24
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CTC/converge.yml12
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CustomCertificate/converge.yml24
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/HTTPS/converge.yml12
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/SSLKEY/converge.yml12
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/converge.yml82
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/generateCSR/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/reset/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/cleanup.yml20
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/converge.yml20
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/verify.yml20
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/cifs_share/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/default/converge.yml24
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/ftp_share/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/http_share/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/https_share/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/httpsproxy_share/converge.yml22
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/negative_scenarios/converge.yml101
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/nfs_share/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/backplane/converge.yml50
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/bios/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/controller/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/default/converge.yml10
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/enclosure/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/enclosureemm/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/fan/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/firmware/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/hostnic/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/idrac/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/license/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/memory/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/negative/converge.yml33
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/nic/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/passensor/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/pciedevice/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/physicaldisk/converge.yml66
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/powersupply/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/secureboot/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsbattery/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsintrusion/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsvoltage/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/systemmetrics/converge.yml12
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/virtualdisk/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/backplane_assert.yml39
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/physicaldisk_assert.yml45
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/converge.yml25
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/molecule.yml5
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/prepare.yml7
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/default/converge.yml142
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/converge.yml24
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/molecule.yml5
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/prepare.yml7
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/converge.yml37
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/molecule.yml5
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/prepare.yml7
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/converge.yml37
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/prepare.yml7
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/converge.yml25
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/molecule.yml5
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/prepare.yml7
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/converge.yml37
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/molecule.yml5
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/prepare.yml7
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_buffer_json/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_buffer_xml/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/converge.yml19
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/molecule.yml5
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/prepare.yml7
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/converge.yml20
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/molecule.yml5
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/prepare.yml7
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/cleanup.yml64
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/export.yml72
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/nic_helper.yml39
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/prepare.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/raid_helper.yml24
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/clear_job_queue/converge.yml4
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/default/converge.yml8
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/delete_job/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/README.md347
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/defaults/main.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/handlers/main.yml2
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/meta/argument_specs.yml170
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/meta/main.yml21
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152120/converge.yml49
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152120/molecule.yml5
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152146/converge.yml46
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152146/molecule.yml5
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152147/converge.yml32
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152147/molecule.yml10
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152148/converge.yml54
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152148/molecule.yml5
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152148/prepare.yml26
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152149/converge.yml45
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152149/molecule.yml5
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152149/prepare.yml19
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152150/converge.yml51
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152150/molecule.yml5
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152150/prepare.yml19
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/default/converge.yml163
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/default/molecule.yml10
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/resources/idrac_user/cleanup.yml17
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/resources/idrac_user/get_user_info.yml9
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/absent.yml14
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/get_user.yml22
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/main.yml10
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/present.yml29
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/tests/inventory2
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/tests/test.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_user/vars/main.yml12
-rw-r--r--ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/__create_virtual_drive.yml29
-rw-r--r--ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/__job_track.yml17
-rw-r--r--ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/converge.yml11
-rw-r--r--ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/initialization/converge.yml82
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_idrac_redfish.py6
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py4
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_redfish.py6
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_session_utils.py415
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_diagnostics.py1057
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py639
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_session.py590
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_storage_volume.py1178
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py379
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py8
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py18
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py38
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py4
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py155
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py4
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py8
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py161
-rw-r--r--ansible_collections/dellemc/powerflex/.github/workflows/ansible-test.yml2
-rw-r--r--ansible_collections/dellemc/powerflex/CHANGELOG.rst7
-rw-r--r--ansible_collections/dellemc/powerflex/FILES.json73
-rw-r--r--ansible_collections/dellemc/powerflex/MANIFEST.json10
-rw-r--r--ansible_collections/dellemc/powerflex/README.md52
-rw-r--r--ansible_collections/dellemc/powerflex/changelogs/changelog.yaml5
-rw-r--r--ansible_collections/dellemc/powerflex/docs/CONTRIBUTING.md8
-rw-r--r--ansible_collections/dellemc/powerflex/docs/INSTALLATION.md4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/ISSUE_TRIAGE.md4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/MAINTAINER_GUIDE.md2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/Release Notes.md9
-rw-r--r--ansible_collections/dellemc/powerflex/docs/SECURITY.md2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/device.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/fault_set.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/info.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/mdm_cluster.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/protection_domain.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/replication_consistency_group.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/replication_pair.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/resource_group.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/sdc.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/sds.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/snapshot.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/snapshot_policy.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/storagepool.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/volume.rst2
-rw-r--r--ansible_collections/dellemc/powerflex/playbooks/roles/group_vars/all1
-rw-r--r--ansible_collections/dellemc/powerflex/plugins/module_utils/storage/dell/utils.py2
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_config/tasks/main.yml29
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/install_sdc.yml4
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/add_sdr.yml36
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_sds/tasks/install_sds.yml26
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/install_tb3x.yml1
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/uninstall_tb.yml27
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_webui/tasks/install_webui.yml1
-rw-r--r--ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.18.txt14
-rw-r--r--ansible_collections/dellemc/unity/.github/workflows/ansible-test.yml117
-rw-r--r--ansible_collections/dellemc/unity/CHANGELOG.rst8
-rw-r--r--ansible_collections/dellemc/unity/FILES.json83
-rw-r--r--ansible_collections/dellemc/unity/MANIFEST.json10
-rw-r--r--ansible_collections/dellemc/unity/README.md64
-rw-r--r--ansible_collections/dellemc/unity/changelogs/.plugin-cache.yaml2
-rw-r--r--ansible_collections/dellemc/unity/changelogs/changelog.yaml5
-rw-r--r--ansible_collections/dellemc/unity/docs/CONTRIBUTING.md8
-rw-r--r--ansible_collections/dellemc/unity/docs/INSTALLATION.md4
-rw-r--r--ansible_collections/dellemc/unity/docs/ISSUE_TRIAGE.md4
-rw-r--r--ansible_collections/dellemc/unity/docs/MAINTAINER_GUIDE.md2
-rw-r--r--ansible_collections/dellemc/unity/docs/Release Notes.md12
-rw-r--r--ansible_collections/dellemc/unity/docs/SECURITY.md2
-rw-r--r--ansible_collections/dellemc/unity/meta/runtime.yml107
-rw-r--r--ansible_collections/dellemc/unity/plugins/doc_fragments/unity.py4
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/consistencygroup.py32
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/filesystem.py10
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/filesystem_snapshot.py170
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/info.py428
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/interface.py98
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/nasserver.py264
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/nfs.py20
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/nfsserver.py100
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/snapshot.py146
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/snapshotschedule.py8
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/storagepool.py25
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/tree_quota.py182
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/user_quota.py328
-rw-r--r--ansible_collections/dellemc/unity/plugins/modules/volume.py8
-rw-r--r--ansible_collections/dellemc/unity/tests/sanity/ignore-2.16.txt (renamed from ansible_collections/dellemc/unity/tests/sanity/ignore-2.13.txt)6
-rw-r--r--ansible_collections/dellemc/unity/tests/sanity/ignore-2.17.txt18
-rw-r--r--ansible_collections/dellemc/unity/tests/unit/requirements.txt (renamed from ansible_collections/dellemc/unity/tests/requirements.txt)0
254 files changed, 13644 insertions, 3358 deletions
diff --git a/ansible_collections/dellemc/openmanage/.ansible-lint-ignore b/ansible_collections/dellemc/openmanage/.ansible-lint-ignore
index 78ed06cb2..0ff93efe7 100644
--- a/ansible_collections/dellemc/openmanage/.ansible-lint-ignore
+++ b/ansible_collections/dellemc/openmanage/.ansible-lint-ignore
@@ -9,6 +9,7 @@ roles/redfish_storage_volume/molecule/RAID5/converge.yml var-naming[no-role-pref
roles/redfish_storage_volume/molecule/RAID10/converge.yml var-naming[no-role-prefix]
roles/redfish_storage_volume/molecule/RAID50/converge.yml var-naming[no-role-prefix]
roles/redfish_storage_volume/molecule/__delete_virtual_drive.yml var-naming[no-role-prefix]
+roles/redfish_storage_volume/molecule/__create_virtual_drive.yml var-naming[no-role-prefix]
roles/redfish_storage_volume/molecule/__idrac_reset.yml var-naming[no-role-prefix]
roles/redfish_storage_volume/molecule/apply_time_default/converge.yml var-naming[no-role-prefix]
roles/redfish_storage_volume/molecule/apply_time_immediate/converge.yml var-naming[no-role-prefix]
@@ -45,6 +46,10 @@ roles/idrac_job_queue/molecule/default/converge.yml var-naming[no-role-prefix]
roles/idrac_job_queue/molecule/clear_job_queue/converge.yml var-naming[no-role-prefix]
roles/idrac_import_server_config_profile/molecule/resources/tests/prepare.yml var-naming[no-role-prefix]
+roles/idrac_import_server_config_profile/molecule/resources/tests/cleanup.yml var-naming[no-role-prefix]
+roles/idrac_import_server_config_profile/molecule/resources/tests/export.yml var-naming[no-role-prefix]
+roles/idrac_import_server_config_profile/molecule/resources/tests/nic_helper.yml var-naming[no-role-prefix]
+roles/idrac_import_server_config_profile/molecule/resources/tests/raid_helper.yml var-naming[no-role-prefix]
roles/idrac_import_server_config_profile/molecule/nfs_share/converge.yml var-naming[no-role-prefix]
roles/idrac_import_server_config_profile/molecule/import_multiple_target/converge.yml var-naming[no-role-prefix]
roles/idrac_import_server_config_profile/molecule/import_buffer_xml/converge.yml var-naming[no-role-prefix]
@@ -120,3 +125,16 @@ roles/idrac_attributes/molecule/default/converge.yml var-naming[no-role-prefix]
roles/idrac_attributes/molecule/idrac_attr/converge.yml var-naming[no-role-prefix]
roles/idrac_attributes/molecule/lifecycle_controller_attr/converge.yml var-naming[no-role-prefix]
roles/idrac_attributes/molecule/system_attr/converge.yml var-naming[no-role-prefix]
+
+roles/idrac_user/defaults/main.yml var-naming[no-role-prefix]
+roles/idrac_user/molecule/TC-152120/converge.yml var-naming[no-role-prefix]
+roles/idrac_user/molecule/TC-152146/converge.yml var-naming[no-role-prefix]
+roles/idrac_user/molecule/TC-152147/converge.yml var-naming[no-role-prefix]
+roles/idrac_user/molecule/TC-152148/converge.yml var-naming[no-role-prefix]
+roles/idrac_user/molecule/TC-152148/prepare.yml var-naming[no-role-prefix]
+roles/idrac_user/molecule/TC-152149/converge.yml var-naming[no-role-prefix]
+roles/idrac_user/molecule/TC-152149/prepare.yml var-naming[no-role-prefix]
+roles/idrac_user/molecule/TC-152150/converge.yml var-naming[no-role-prefix]
+roles/idrac_user/molecule/TC-152150/prepare.yml var-naming[no-role-prefix]
+roles/idrac_user/molecule/default/converge.yml var-naming[no-role-prefix]
+roles/idrac_user/molecule/resources/idrac_user/cleanup.yml var-naming[no-role-prefix]
diff --git a/ansible_collections/dellemc/openmanage/.github/workflows/ansible-test.yml b/ansible_collections/dellemc/openmanage/.github/workflows/ansible-test.yml
index 33251a189..3e0089fc4 100644
--- a/ansible_collections/dellemc/openmanage/.github/workflows/ansible-test.yml
+++ b/ansible_collections/dellemc/openmanage/.github/workflows/ansible-test.yml
@@ -15,7 +15,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- ansible-version: [stable-2.14, stable-2.15, stable-2.16, devel]
+ ansible-version: [stable-2.15, stable-2.16, stable-2.17, devel]
steps:
- name: Check out code
uses: actions/checkout@v3
@@ -47,17 +47,12 @@ jobs:
needs: [build]
strategy:
matrix:
- python: ['3.9', '3.10', '3.11']
+ python: ['3.10', '3.11']
ansible:
- - stable-2.14
- stable-2.15
- stable-2.16
+ - stable-2.17
- devel
- exclude:
- - ansible: stable-2.16
- python: '3.9'
- - ansible: devel
- python: '3.9'
runs-on: ubuntu-latest
steps:
@@ -79,17 +74,12 @@ jobs:
needs: [build]
strategy:
matrix:
- python: ['3.9', '3.10', '3.11']
+ python: ['3.10', '3.11']
ansible:
- - stable-2.14
- stable-2.15
- stable-2.16
+ - stable-2.17
- devel
- exclude:
- - ansible: stable-2.16
- python: '3.9'
- - ansible: devel
- python: '3.9'
runs-on: ubuntu-latest
steps:
- name: Perform sanity testing
@@ -108,13 +98,8 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.9', '3.10', '3.11']
- ansible-version: [stable-2.14, stable-2.15, stable-2.16, devel]
- exclude:
- - ansible-version: stable-2.16
- python-version: '3.9'
- - ansible-version: devel
- python-version: '3.9'
+ python-version: ['3.10', '3.11']
+ ansible-version: [stable-2.15, stable-2.16, stable-2.17, devel]
steps:
# Important: This sets up your GITHUB_WORKSPACE environment variable
- name: Checkout the source code
diff --git a/ansible_collections/dellemc/openmanage/CHANGELOG.rst b/ansible_collections/dellemc/openmanage/CHANGELOG.rst
index bb78b5c2d..1620e6974 100644
--- a/ansible_collections/dellemc/openmanage/CHANGELOG.rst
+++ b/ansible_collections/dellemc/openmanage/CHANGELOG.rst
@@ -5,6 +5,118 @@ Dell OpenManage Ansible Modules Release Notes
.. contents:: Topics
+v9.2.0
+======
+
+Release Summary
+---------------
+
+- The idrac_session module is added to allow you to create and delete the sessions on iDRAC.
+- The idrac_reset module is enhanced to allow you to reset the iDRAC to factory default settings.
+
+Major Changes
+-------------
+
+- idrac_session - This module allows you to create and delete the sessions on iDRAC.
+
+Minor Changes
+-------------
+
+- idrac_reset - This module allows you to reset the iDRAC to factory default settings.
+
+Known Issues
+------------
+
+- idrac_diagnostics - Issue(285322) - This module doesn't support export of diagnostics file to HTTP and HTTPS share via SOCKS proxy.
+- idrac_firmware - Issue(279282) - This module does not support firmware update using HTTP, HTTPS, and FTP shares with authentication on iDRAC8.
+- idrac_storage_volume - Issue(290766) - The module will report success instead of showing failure for new virtual disk creation on the BOSS-N1 controller if a virtual disk is already present on the same controller.
+- ome_diagnostics - Issue(279193) - Export of SupportAssist collection logs to the share location fails on OME version 4.0.0.
+- ome_smart_fabric_uplink - Issue(186024) - The module is supported by OpenManage Enterprise Modular; however, it does not allow the creation of multiple uplinks of the same name. If an uplink is created using the same name as an existing uplink, then the existing uplink is modified.
+
+New Modules
+-----------
+
+- dellemc.openmanage.idrac_session - Allows you to create and delete the sessions on iDRAC.
+
+v9.1.0
+======
+
+Release Summary
+---------------
+
+- ``redfish_storage_volume`` is enhanced to support iDRAC8.
+- ``dellemc_idrac_storage_volume`` is deprecated and replaced with ``idrac_storage_volume``.
+
+Minor Changes
+-------------
+
+- redfish_storage_volume - This module is enhanced to support iDRAC8.
+
+Deprecated Features
+-------------------
+
+- The ``dellemc_idrac_storage_volume`` module is deprecated and replaced with ``idrac_storage_volume``.
+
+Bugfixes
+--------
+
+- Added support for RAID creation using NVMe disks.(https://github.com/dell/dellemc-openmanage-ansible-modules/issues/635)
+- redfish_storage_volume is enhanced to support iDRAC8.(https://github.com/dell/dellemc-openmanage-ansible-modules/issues/625)
+
+Known Issues
+------------
+
+- idrac_diagnostics - Issue(285322) - This module doesn't support export of diagnostics file to HTTP and HTTPS share via SOCKS proxy.
+- idrac_firmware - Issue(279282) - This module does not support firmware update using HTTP, HTTPS, and FTP shares with authentication on iDRAC8.
+- idrac_storage_volume - Issue(290766) - The module will report success instead of showing failure for new virtual disk creation on the BOSS-N1 controller if a virtual disk is already present on the same controller.
+- ome_diagnostics - Issue(279193) - Export of SupportAssist collection logs to the share location fails on OME version 4.0.0.
+- ome_smart_fabric_uplink - Issue(186024) - The module is supported by OpenManage Enterprise Modular; however, it does not allow the creation of multiple uplinks of the same name. If an uplink is created using the same name as an existing uplink, then the existing uplink is modified.
+
+New Modules
+-----------
+
+- dellemc.openmanage.idrac_storage_volume - Configures the RAID configuration attributes.
+
+v9.0.0
+======
+
+Release Summary
+---------------
+
+- idrac_diagnostics module is added to run and export diagnostics on iDRAC.
+- idrac_user role is added to manage local users of iDRAC.
+
+Major Changes
+-------------
+
+- idrac_diagnostics - The module is introduced to run and export diagnostics on iDRAC.
+- idrac_user - This role is introduced to manage local users of iDRAC.
+
+Bugfixes
+--------
+
+- idrac_network_attributes - Issue(279049) - If unsupported values are provided for the parameter ``ome_network_attributes``, then this module does not provide a correct error message.
+- ome_device_network_services - Issue(212681) - The module does not provide a proper error message if unsupported values are provided for the following parameters- port_number, community_name, max_sessions, max_auth_retries, and idle_timeout.
+- ome_device_power_settings - Issue(212679) - The module displays the following message if the value provided for the parameter ``power_cap`` is not within the supported range of 0 to 32767, ``Unable to complete the request because PowerCap does not exist or is not applicable for the resource URI.``
+
+Known Issues
+------------
+
+- idrac_diagnostics - Issue(285322) - This module doesn't support export of diagnostics file to HTTP and HTTPS share via SOCKS proxy.
+- idrac_firmware - Issue(279282) - This module does not support firmware update using HTTP, HTTPS, and FTP shares with authentication on iDRAC8.
+- ome_diagnostics - Issue(279193) - Export of SupportAssist collection logs to the share location fails on OME version 4.0.0.
+- ome_smart_fabric_uplink - Issue(186024) - The module supported by OpenManage Enterprise Modular, however it does not allow the creation of multiple uplinks of the same name. If an uplink is created using the same name as an existing uplink, then the existing uplink is modified.
+
+New Modules
+-----------
+
+- dellemc.openmanage.idrac_diagnostics - This module allows to run and export diagnostics on iDRAC.
+
+New Roles
+---------
+
+- dellemc.openmanage.idrac_user - Role to manage local users of iDRAC.
+
v8.7.0
======
diff --git a/ansible_collections/dellemc/openmanage/FILES.json b/ansible_collections/dellemc/openmanage/FILES.json
index db8150b4c..cd6b8725e 100644
--- a/ansible_collections/dellemc/openmanage/FILES.json
+++ b/ansible_collections/dellemc/openmanage/FILES.json
@@ -15,13 +15,6 @@
"format": 1
},
{
- "name": ".ansible-lint-ignore",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2b4cb17a21587a17f096001e3cbd9e8b019a667eb724cf01396a35ff5bad2558",
- "format": 1
- },
- {
"name": ".github",
"ftype": "dir",
"chksum_type": null,
@@ -88,7 +81,7 @@
"name": ".github/workflows/ansible-test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "24f87fbc03181be62e4cedf4a463ea5eaab0b5bd4ec68d762a2647015712cd5a",
+ "chksum_sha256": "d797937de9e579f1ecc980d7db21d2e3d36055f4212cad8103d4dba0d7d140cd",
"format": 1
},
{
@@ -99,13 +92,6 @@
"format": 1
},
{
- "name": "CHANGELOG.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0b095556983a9c9b8a231db06b80b6b529483932af3c6978be58d7b53e9dcc56",
- "format": 1
- },
- {
"name": "LICENSE",
"ftype": "file",
"chksum_type": "sha256",
@@ -113,13 +99,6 @@
"format": 1
},
{
- "name": "README.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f780a795a434441926ca75949a5675e2f7d2f40378e87d4ac6e918b70e6dd312",
- "format": 1
- },
- {
"name": "bindep.txt",
"ftype": "file",
"chksum_type": "sha256",
@@ -141,17 +120,17 @@
"format": 1
},
{
- "name": "changelogs/changelog.yaml",
+ "name": "changelogs/config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a90add93498588062af0df9b8d61283c97523cd1d53e278a5f82826d90bd0b54",
+ "chksum_sha256": "c00012f627317aec0a7b0b4db3da8c43eb7eb63188eb3f7ee8c2319692aab2d7",
"format": 1
},
{
- "name": "changelogs/config.yaml",
+ "name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c00012f627317aec0a7b0b4db3da8c43eb7eb63188eb3f7ee8c2319692aab2d7",
+ "chksum_sha256": "743a69fffd94987fc63c58c728271c10cd363eff555161aef6d6a45d70300b3d",
"format": 1
},
{
@@ -211,13 +190,6 @@
"format": 1
},
{
- "name": "docs/EXECUTION_ENVIRONMENT.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f2e0b86dc19b76449e3bee834de2cadf3df5155623884753da9b03ae69b5b223",
- "format": 1
- },
- {
"name": "docs/ISSUE_TRIAGE.md",
"ftype": "file",
"chksum_type": "sha256",
@@ -232,13 +204,6 @@
"format": 1
},
{
- "name": "docs/README.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8be88ea146508ad7565154a1fb86d5d52f69f55e2ef0eca32402083a1e8e14fc",
- "format": 1
- },
- {
"name": "docs/SECURITY.md",
"ftype": "file",
"chksum_type": "sha256",
@@ -400,10 +365,10 @@
"format": 1
},
{
- "name": "docs/modules/idrac_reset.rst",
+ "name": "docs/modules/idrac_session.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4f08e626ca74cd6d0dab35f749fcd6878e90118a4d1053ebf45e45685fd0dc8d",
+ "chksum_sha256": "b76827433fbcff22ddf95bc5632868b72cf395f419840c4e12d5373142c0f72c",
"format": 1
},
{
@@ -519,10 +484,10 @@
"format": 1
},
{
- "name": "docs/modules/ome_application_console_preferences.rst",
+ "name": "docs/modules/ome_device_local_access_configuration.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ca4376734fb3ae51179cb231bb2e8a0b6e128056f277a75a00fa00d5dcbe0248",
+ "chksum_sha256": "d1166a89877d55ff1b942027f2027cd5509c8270f62ba4f5f1824317789f90b8",
"format": 1
},
{
@@ -603,10 +568,10 @@
"format": 1
},
{
- "name": "docs/modules/ome_device_local_access_configuration.rst",
+ "name": "docs/modules/ome_devices.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6a7cc7259449e562a31a750dcb902c30af50ef1416f53c4ec4bcc3c143221443",
+ "chksum_sha256": "83328a82404890d15c1d5b6bdd0e8bfac5c9aca355fa3df468bdc744622fc439",
"format": 1
},
{
@@ -641,14 +606,14 @@
"name": "docs/modules/ome_device_quick_deploy.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e98c65583cb4ca7fdddebf1f33b91ffdc2f00fdf3b0dcc67782551e8cac0b577",
+ "chksum_sha256": "87e3c3bc99c687465681a9ab0001ab2fae508969f750fb1cd3fc475db7fd8491",
"format": 1
},
{
- "name": "docs/modules/ome_devices.rst",
+ "name": "docs/modules/idrac_reset.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b3a493600c1706800ff504b61724823017744ff0336509b0edf555957105e9c0",
+ "chksum_sha256": "3fd35275a4b04236ac8166c50d34316ac0568d857e2a031478f31700458bb6c0",
"format": 1
},
{
@@ -893,7 +858,49 @@
"name": "docs/modules/redfish_storage_volume.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b32f32754226f2ac4dfbad0a6c4b156c8f5c1692d016b6e6c3208e7f9e94881c",
+ "chksum_sha256": "f08222e870ac987aa36b3b1f57b848ad595e876eee376b98bb9450f9a723ab67",
+ "format": 1
+ },
+ {
+ "name": "docs/modules/idrac_diagnostics.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dbabe7fd2ab37c6d7b1ced5ff8d6786bacd77162473c5064ddab94b060378c6b",
+ "format": 1
+ },
+ {
+ "name": "docs/modules/idrac_storage_volume.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d9f99982eda9f2b90ce1ee0eb805a56bc6d97eaa6935efdefefd20584a181ef3",
+ "format": 1
+ },
+ {
+ "name": "docs/modules/ome_application_console_preferences.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b64935ad594dde5a7496c90891053dbc8b32e21a5d582532bee85dcaf3fea42e",
+ "format": 1
+ },
+ {
+ "name": "docs/ATTRIBUTION.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0e55990cf8c6c469b4c35f0cf9ba6dd92d89f39d36e8d22f45a939a6edd06c20",
+ "format": 1
+ },
+ {
+ "name": "docs/EXECUTION_ENVIRONMENT.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c853f1c80c075bc775ec977562ae868177567256bef9d69543f6519cfd7b852a",
+ "format": 1
+ },
+ {
+ "name": "docs/README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7e931cceff02aadb24f0f4afa56b4e1283505d82cbd41d1f535aabd3a58bf8c9",
"format": 1
},
{
@@ -914,7 +921,7 @@
"name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b20215f8fccdf256ff05c7e1b24161dfac781b8fda913345dad2828a180a54b7",
+ "chksum_sha256": "dac8ac908fdd6b7bffe4a091c3f443b45716b1bc591e80cae03270731ef62598",
"format": 1
},
{
@@ -932,13 +939,6 @@
"format": 1
},
{
- "name": "playbooks/idrac/dellemc_idrac_storage_volume.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "9903d13a50785663a1e781690f981645334d81e112a6872865118b6bac24a52e",
- "format": 1
- },
- {
"name": "playbooks/idrac/deprecated",
"ftype": "dir",
"chksum_type": null,
@@ -1100,13 +1100,6 @@
"format": 1
},
{
- "name": "playbooks/idrac/idrac_reset.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d2e2806fc4f6d092db801230b3ed1a437edae17bf98590b3ef7de0692fb0b2e0",
- "format": 1
- },
- {
"name": "playbooks/idrac/idrac_reset_result_tracking.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -1156,6 +1149,34 @@
"format": 1
},
{
+ "name": "playbooks/idrac/idrac_diagnostics.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f14fa78ada0d1c85ecd18419f0ec1940cbd387af3fd35e78da2e676228ed897e",
+ "format": 1
+ },
+ {
+ "name": "playbooks/idrac/idrac_storage_volume.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "739c488df021adb4d9cf2f8843dd4314bdad00f4076e22c2e0580d2d8b08ba7d",
+ "format": 1
+ },
+ {
+ "name": "playbooks/idrac/idrac_reset.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "de8fe2c4ad4c761b39cd3f2f26a33de198d13764460cd4ae48ddc41e3055c129",
+ "format": 1
+ },
+ {
+ "name": "playbooks/idrac/idrac_session.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b0440a47587c5d5ddb9912b53731727e93d0b889c12241594343f919d22f5bda",
+ "format": 1
+ },
+ {
"name": "playbooks/ome",
"ftype": "dir",
"chksum_type": null,
@@ -1317,17 +1338,17 @@
"format": 1
},
{
- "name": "playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml",
+ "name": "playbooks/ome/compliance/ome_configuration_compliance_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6fb650fdb90cefcbc32a59f7fc5facf3413f43908a40bdbd93bde43f3570de8a",
+ "chksum_sha256": "e248d9baf3d9a443dd968b7dea92c70aba87d542b52a34d692daf7c3f595587e",
"format": 1
},
{
- "name": "playbooks/ome/compliance/ome_configuration_compliance_info.yml",
+ "name": "playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e248d9baf3d9a443dd968b7dea92c70aba87d542b52a34d692daf7c3f595587e",
+ "chksum_sha256": "bc42c37b1f2c9492b5b297c8245fc6836f4ae89dcdfebeaf958bce0847df6f73",
"format": 1
},
{
@@ -2213,17 +2234,31 @@
"format": 1
},
{
- "name": "plugins",
+ "name": "playbooks/roles/vars_files/user.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4dbddb5c3dedb08b2a42e3b2131cb515bca4ae9d83ae4a28a17ff4919993b587",
+ "format": 1
+ },
+ {
+ "name": "playbooks/roles/idrac_user",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/README.md",
+ "name": "playbooks/roles/idrac_user/idrac_user.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aaa3a9ba0daeec5a41786ee8e307e4663217563d1b1cdd1adf2cd4813ab6e9d0",
+ "chksum_sha256": "44346ffd0acd7bc28a4b0760772385552686673dc6421947e1b6d2a916b6e5b2",
+ "format": 1
+ },
+ {
+ "name": "plugins",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
@@ -2311,13 +2346,6 @@
"format": 1
},
{
- "name": "plugins/module_utils/dellemc_idrac.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3d9f01ddb66c08650f51804e15be43c971f57e8f7960b9e8eb755b1694f655f3",
- "format": 1
- },
- {
"name": "plugins/module_utils/idrac_redfish.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -2339,10 +2367,24 @@
"format": 1
},
{
+ "name": "plugins/module_utils/dellemc_idrac.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e60a6661de248b2be11330274cecf6af56989f04388f210ed3a6cf28753b57ee",
+ "format": 1
+ },
+ {
"name": "plugins/module_utils/utils.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9b089f767c4d9c3d6f396ddcfb50b7ee3280f4f9f4350171ef445d0f20f1abb6",
+ "chksum_sha256": "f5b4aa1d4f6bcdf22373061d2dd5d1f873f6cf277c3caded38c75c69d97f6556",
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/session_utils.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "70a20d812a16708506bdd018e08bd378ed96d3906aa627b3f43c45c5ab1e6a55",
"format": 1
},
{
@@ -2381,10 +2423,10 @@
"format": 1
},
{
- "name": "plugins/modules/dellemc_idrac_storage_volume.py",
+ "name": "plugins/modules/redfish_storage_volume.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "33401cd52a68385aba75f18e26cb79b0a6dd180a9d6f7770a14adb86ea65c8ec",
+ "chksum_sha256": "0c385c1f8206967b4b3a955e4cbe107df5895ee36a7653b87b41c07ac152f634",
"format": 1
},
{
@@ -2500,17 +2542,10 @@
"format": 1
},
{
- "name": "plugins/modules/idrac_reset.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "68fb61b540612a4ac7c86874e56fdeb1bb706d8dc7df382af6ec6060081ce69c",
- "format": 1
- },
- {
- "name": "plugins/modules/idrac_server_config_profile.py",
+ "name": "plugins/modules/idrac_session.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "175505c7f6ed9b09a2b1998e0c9dc44ccb619218ed4ac1a665e955b7d2c8b686",
+ "chksum_sha256": "70dedb6e3721bd47cdd4d1d8c058eb0db343c2217c6f5e742b40cea43939e40c",
"format": 1
},
{
@@ -2619,10 +2654,10 @@
"format": 1
},
{
- "name": "plugins/modules/ome_application_console_preferences.py",
+ "name": "plugins/modules/ome_device_local_access_configuration.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e35ef322bf87e64517906464c5c605ac560dbefe3a5d93acbb63c5388401b192",
+ "chksum_sha256": "3d3d5e42de5fdb980d19d4f943d725b75145cc112ed42a2524209c65cd66148d",
"format": 1
},
{
@@ -2703,10 +2738,10 @@
"format": 1
},
{
- "name": "plugins/modules/ome_device_local_access_configuration.py",
+ "name": "plugins/modules/ome_device_quick_deploy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a00d9f17e5021eb03f67917415d1953bb64b3e9b5dbcbe9356d81f72115f9a2",
+ "chksum_sha256": "65d68eb6d1902ba5c818a0eab913cea192c68d807fd455bd74303fbfe31e7757",
"format": 1
},
{
@@ -2738,17 +2773,17 @@
"format": 1
},
{
- "name": "plugins/modules/ome_device_quick_deploy.py",
+ "name": "plugins/modules/ome_devices.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b702d20b9e2474c9ca9bc14f86c6fb6d7dc80350e77435ec6519e9976bd9627",
+ "chksum_sha256": "c94ff76fa306c5469bc5930c58f64b4e2423a76c835ed19cd315173e673601ee",
"format": 1
},
{
- "name": "plugins/modules/ome_devices.py",
+ "name": "plugins/modules/idrac_reset.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "792fd9d7344be270366daed2872417a3e76e367667ac7c4c24dcb08b1119e284",
+ "chksum_sha256": "0bb2201ed119d6e26aa583427820f4d9b440c4489ad354a8508ed3ae96edee30",
"format": 1
},
{
@@ -2990,17 +3025,45 @@
"format": 1
},
{
- "name": "plugins/modules/redfish_storage_volume.py",
+ "name": "plugins/modules/idrac_diagnostics.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "43bc45da1a74df993f68a839aa1e4c743e0d774bd19e318f0e48abca127f51fa",
+ "chksum_sha256": "cc28820b11d8e6622f429ef77484778b0b9fa9a5d6d4fe559a58fba978fe724f",
"format": 1
},
{
- "name": "requirements.txt",
+ "name": "plugins/modules/dellemc_idrac_storage_volume.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4e9530ae54ec5e0e70142ae0c6585cd78af2ce511f24dd829affb0ba719424b3",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/idrac_server_config_profile.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2158f3770b7aea8dfffc46bcea4a960857805a25df674e584c0c3c863dd1c04b",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/idrac_storage_volume.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b94ebb86f37b60cfcc1d06f924079bdb5502583c63b10dfc9b8eb6683fd02551",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/ome_application_console_preferences.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1eb0dd33e5b833a1688dee170db6e84abaebaea1b38f73908013fd2ca74817a8",
+ "format": 1
+ },
+ {
+ "name": "plugins/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d9edf1a05b19caf48aab674c2d9e34c1b817a9b480255e91b73cf0f41e401b96",
+ "chksum_sha256": "bf8697057933ae95e9172b8fb6da9907557f5f086ed7e91da850a2fb573fcf9d",
"format": 1
},
{
@@ -3350,7 +3413,7 @@
"name": "roles/idrac_bios/molecule/clear_pending_attributes/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b18eaefea67777d2cca717116654500a21607506a8097e180d6b91346f88f687",
+ "chksum_sha256": "789337cedb7a0e5f33484b00e7a4c9cbe40cb133841e1656f18cff08f042d7af",
"format": 1
},
{
@@ -3382,17 +3445,17 @@
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window/converge.yml",
+ "name": "roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d905bfe4faacf3a7e02a41f538327385ba1dc7bf778a95bf2787447a404db67d",
+ "chksum_sha256": "1c3ff5ada33af88f7d89035e74a24e9f7ebd0bd1ce9aea711a11e456babcedeb",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window/molecule.yml",
+ "name": "roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1c3ff5ada33af88f7d89035e74a24e9f7ebd0bd1ce9aea711a11e456babcedeb",
+ "chksum_sha256": "b9d44e3bf2e9c3dd4a24b59e4b33228d23fca8428f4060d6ace4a7e884fe469e",
"format": 1
},
{
@@ -3690,17 +3753,17 @@
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fcfcac8b7c470be7db84a5ed8c0b958c6a056d0ef05b2873f81eededd75a3ed9",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "ab30d143cf010145f94f14c05cab91120f717be0cffadc9f348daffa7ac0e8ff",
"format": 1
},
{
@@ -3711,17 +3774,17 @@
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4123a151aeef273229f90ea9d97454a56c6bc2614ab1b82e46d1b5a63bf4ead6",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "105480778b833d51453b2b22e7ac419eb3865b523bd5f979789e66feaa46c4db",
"format": 1
},
{
@@ -3732,17 +3795,17 @@
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf2c97a9b021ec60a46c21fb122c575cf5a4c5cb931ca25deb80d8a3af1e4df3",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "808af97a5a234940ab3c38a93c54f4a164b7cb52ee47107137cc4555b53e9a1d",
"format": 1
},
{
@@ -3753,17 +3816,17 @@
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "29cae1e6b242c1d9129666e724c14c023d54c0dab247a6df3ff78dc6a02c23f4",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "9fb0c40a0d88c86aa9056a949b626eadceadaf9379731fd83045b2cb1c12aa14",
"format": 1
},
{
@@ -3777,14 +3840,14 @@
"name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0d19f060b5c483683cb13e4bb7d5bcbcb8285a93185df49e9da280941fc9ea7a",
+ "chksum_sha256": "038690fb11c33de166dc94bf35d151639978151731963af7ec44234ced12eb06",
"format": 1
},
{
"name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "de8583bbc3924432cfe625f9899beb6ad7848058e61d1ecabd745ec810ee5498",
"format": 1
},
{
@@ -3816,17 +3879,17 @@
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe2b08a47a39e288193df820dac93dedff7c9b1e0f81790201b8d34865db94dd",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "272d65772e3c0602b288c8afc69a647482a57d9572e0d971aa9c9a35f5944b79",
"format": 1
},
{
@@ -3837,17 +3900,17 @@
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "75317e78c11cd0f2c999b0a24869c569f6eb137a83d1e3831fb0e8d3db9656d4",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "78a4eaa877f5633e31f590a372723c137736dd11b9a113d36e198435755eb54c",
"format": 1
},
{
@@ -3858,17 +3921,17 @@
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e7a204475806673210f3155629e8fc017020e826606bc7cb67b78e7f3b3e556",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "b2b06a3776dfd89429ebc65b6e626e1caa9c3bb2c3210c208add9cad25e7b169",
"format": 1
},
{
@@ -4019,17 +4082,17 @@
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CA/converge.yml",
+ "name": "roles/idrac_certificate/molecule/CA/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "920f4e9cb7ca5ef8393b92b8df4f47d7e92455e39cb0e0d56eac1411e2238cef",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CA/molecule.yml",
+ "name": "roles/idrac_certificate/molecule/CA/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "5bf154879b4a34b326240ccb33a490a5f8bc7f228248db93c02baaaa0869d09e",
"format": 1
},
{
@@ -4040,17 +4103,17 @@
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CSC/converge.yml",
+ "name": "roles/idrac_certificate/molecule/CSC/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9877fa2c96f05981f6afc09470ec0b6feadda2f501d1e6380d8d438a8a367c83",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CSC/molecule.yml",
+ "name": "roles/idrac_certificate/molecule/CSC/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "99431629f17cbe04456ef0f839377cb71a7ae2b7c541deba9a4b769bba4c06f4",
"format": 1
},
{
@@ -4061,17 +4124,17 @@
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CTC/converge.yml",
+ "name": "roles/idrac_certificate/molecule/CTC/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "db852bf66f0d560599d1631d6178abf0aea7e7c768553bf3e5163ab5ca3c9a80",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CTC/molecule.yml",
+ "name": "roles/idrac_certificate/molecule/CTC/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "ecd35a2c8d4a1223c5a37a4e80a2f9a950f633357f2eb2655f6f20ca30198c5c",
"format": 1
},
{
@@ -4082,17 +4145,17 @@
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CustomCertificate/converge.yml",
+ "name": "roles/idrac_certificate/molecule/CustomCertificate/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "56fea8f40c9d9eca3d4c42519c87a21da0a603f323a705d7eb9bc022e594e449",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CustomCertificate/molecule.yml",
+ "name": "roles/idrac_certificate/molecule/CustomCertificate/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "65da72677ef3b2e1c383087b86fda3d45434287ce1cf2ddb4968b0a2ff0bf7c7",
"format": 1
},
{
@@ -4103,17 +4166,17 @@
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/HTTPS/converge.yml",
+ "name": "roles/idrac_certificate/molecule/HTTPS/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a9858eda2d16422a41010c07c064193667ee573a295bd4393a7681cf0f159383",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/HTTPS/molecule.yml",
+ "name": "roles/idrac_certificate/molecule/HTTPS/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "f06ff94029cdaceaf9865f0299fc6013b0fea5193ddbd07d078f543eb146d27f",
"format": 1
},
{
@@ -4124,17 +4187,17 @@
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/SSLKEY/converge.yml",
+ "name": "roles/idrac_certificate/molecule/SSLKEY/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30c5c2fc158089dc6f39444bae637bb01e3ad81865b56fa72903b702580987d6",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/SSLKEY/molecule.yml",
+ "name": "roles/idrac_certificate/molecule/SSLKEY/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0f8fc730b66884b45530be3fdbdbed659d79387466637b2fb129573fbc74cbee",
"format": 1
},
{
@@ -4173,17 +4236,17 @@
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/default/converge.yml",
+ "name": "roles/idrac_certificate/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "462bfb673d20d3ea0a5b9a6731feacd316b74db4025013bad12141083bf62d1d",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/default/molecule.yml",
+ "name": "roles/idrac_certificate/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "3b8b6811b2776890e59fdddbf667a5d2f8e25207cb478cf865c5c97140ac586a",
"format": 1
},
{
@@ -4194,17 +4257,17 @@
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/generateCSR/converge.yml",
+ "name": "roles/idrac_certificate/molecule/generateCSR/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08b7b587facfba070a4b05e0a7cc17e3936f660d0d57b39a69c63a9955b9ee79",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/generateCSR/molecule.yml",
+ "name": "roles/idrac_certificate/molecule/generateCSR/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "57033a2a72d8b711a5c6c2e46131a5b1b1ce8068b43a81f0a51dd44d022bfe36",
"format": 1
},
{
@@ -4215,17 +4278,17 @@
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/reset/converge.yml",
+ "name": "roles/idrac_certificate/molecule/reset/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6caadbdbb48e6ab7e9c2e326b312ca540813cecd18e5caedc7cf9d9f401abd90",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/reset/molecule.yml",
+ "name": "roles/idrac_certificate/molecule/reset/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "5b646c45281197bec65800696292b5e6d62ba1aa36be2149c7de10b439ddba55",
"format": 1
},
{
@@ -4383,31 +4446,31 @@
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/molecule/default/cleanup.yml",
+ "name": "roles/idrac_export_server_config_profile/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1ebde9ca53897de40df1400880e6a2a91d81c7a83e56541ee410b675d781a063",
+ "chksum_sha256": "6c0d5bf979a0ad1541b496f173165b11f0ad14283391efde2c86ee35c477eb43",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/molecule/default/converge.yml",
+ "name": "roles/idrac_export_server_config_profile/molecule/default/cleanup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1a0d846dff7ca3876c76b1e6cfd625ab62ff93133385f09983e3419025e53a0c",
+ "chksum_sha256": "cd932a799fefa81f0aafeb32b788e887e555c076f2b5fedb3ea75e81eb7f707b",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/molecule/default/molecule.yml",
+ "name": "roles/idrac_export_server_config_profile/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6c0d5bf979a0ad1541b496f173165b11f0ad14283391efde2c86ee35c477eb43",
+ "chksum_sha256": "621eb2a293781a0408900a6df0884360a7ed673e610b0cbdfe67db3221f62b16",
"format": 1
},
{
"name": "roles/idrac_export_server_config_profile/molecule/default/verify.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4108b95a87aad1d805cade4df5e3071720a3c449023378b488796e8d1b0baaff",
+ "chksum_sha256": "6674f5a55b2b9311c78b61713e305a7b6419f02de40cabd4fdb337f234cd88fa",
"format": 1
},
{
@@ -4579,17 +4642,17 @@
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/cifs_share/converge.yml",
+ "name": "roles/idrac_firmware/molecule/cifs_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2a085cfe51d93783380fcf3c0900e392f570b0331849d3f6d475da29df6d19ce",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/cifs_share/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/cifs_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "e9cba7b8006136cf795ca98cca97e649fb9965988a7c5b4669c6fa77919693b9",
"format": 1
},
{
@@ -4600,17 +4663,17 @@
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/default/converge.yml",
+ "name": "roles/idrac_firmware/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5aa353429e55ed0e91057cdcbd8e4b8791d31c0f4e60d85f4ab62bf972d86fa6",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/default/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "b8f562b0f8963d6fb1653344be6a0391d63b54ed70b7f23e3086a030fc14463b",
"format": 1
},
{
@@ -4621,17 +4684,17 @@
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/ftp_share/converge.yml",
+ "name": "roles/idrac_firmware/molecule/ftp_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49d98dc1fc2fce07f662446a0063c02e7f8cd93571cf38d56a7f85a278fa963c",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/ftp_share/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/ftp_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "a031dc6f65a6c3a7b75af9a1cff7206ccbd32d061c2738fd180aee6d86368a4e",
"format": 1
},
{
@@ -4642,17 +4705,17 @@
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/http_share/converge.yml",
+ "name": "roles/idrac_firmware/molecule/http_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5471b0eb5b4eb38016f0201623470b2dbed5d4a0c0cb849d582490795a13350",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/http_share/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/http_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "e78dce79455ba2ae773abf7ef878883286686923dbd95b7f157ac006200ca670",
"format": 1
},
{
@@ -4663,17 +4726,17 @@
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/https_share/converge.yml",
+ "name": "roles/idrac_firmware/molecule/https_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "defd19ca5cd9cece94b5805b6fa2b0329f0bf38bcf8920a164875d49380acf0e",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/https_share/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/https_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "23232705312d495854c47436c1481e826373b541e19ec964ed6ad07a9fdd9208",
"format": 1
},
{
@@ -4684,17 +4747,17 @@
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/httpsproxy_share/converge.yml",
+ "name": "roles/idrac_firmware/molecule/httpsproxy_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4b89bbf2005ff14ca995095080b6f5408139c77fdf5b05df787f0299f5a64060",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/httpsproxy_share/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/httpsproxy_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "d362f33905add12d47dd5b02d798d55eb29ada085cda85e8cd37ba04f97cde87",
"format": 1
},
{
@@ -4705,17 +4768,17 @@
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/negative_scenarios/converge.yml",
+ "name": "roles/idrac_firmware/molecule/negative_scenarios/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6b9dd2af9cb698278463c46b3afbf3833cb9b8bc203d97eba7cad9d95fe65688",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/negative_scenarios/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/negative_scenarios/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "c65a450270cb2725ea0c0d77439695d6fdf0721f6711d35265baab3f8aacbf9e",
"format": 1
},
{
@@ -4726,17 +4789,17 @@
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/nfs_share/converge.yml",
+ "name": "roles/idrac_firmware/molecule/nfs_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6c8f6e3d66fdc1d105151bc0df8d444dd0ebd4e6bd986f59dbadeaca3a72b9d4",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/nfs_share/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/nfs_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "31631bf50bdb9a5b035dcaa97b4c1cba71139e886468ee04666191f8418a65ec",
"format": 1
},
{
@@ -4866,17 +4929,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/backplane/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/backplane/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f060c709ab360c2ea3299558afe6853d338a5ea57d673d419361c87525e44c69",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/backplane/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/backplane/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "2a1a0a21993f25c4bc9b53a65c20dfbc128821bbf5868dad00326fae00ff82e1",
"format": 1
},
{
@@ -4887,17 +4950,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/bios/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/bios/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "19940920328ca99471e6c48105d744234a8601ac23be9f43ebc47f4dc199ee80",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/bios/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/bios/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "6aeb01343669a588a39fb5c079282e515ea89873d6e7bcc036e053f58092ae62",
"format": 1
},
{
@@ -4908,17 +4971,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/controller/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/controller/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aa250d5db7bb8ba429a89882d0b7496f555b742634df347fb0d9832780032523",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/controller/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/controller/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "fd6e6417f014ec03b13867ac110beb0328e8d4e421d71e534002153a29e99b8a",
"format": 1
},
{
@@ -4950,17 +5013,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/default/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b14aa1092665e1c96e60baa45c8be0219a08702211efdadd89977ce8bd11bdc",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/default/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "cff3d242b9a5abe65abd87b232d6678b0952aea01ac388e894489e47577dfea3",
"format": 1
},
{
@@ -4971,17 +5034,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/enclosure/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/enclosure/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "42dad150ec77379f3d5ec4dd2d5d009c4e0b1eb61f4bb6e9e0b88da5a0527c62",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/enclosure/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/enclosure/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "cf1906e1ced1cb774e276f108b46c8fcf7c47db2fd45f9abc75f98b15d5d6661",
"format": 1
},
{
@@ -4992,17 +5055,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/enclosureemm/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/enclosureemm/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4533ddb492d9a730609c0e45b1edf770dcc9f1aaa12a04f60d47dbb33ddb2bb4",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/enclosureemm/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/enclosureemm/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "0283a9aae3c3d8ba622a804822089373823a9f1494266ed068d65766055922d1",
"format": 1
},
{
@@ -5013,17 +5076,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/fan/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/fan/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "87af8814a77e8103470cdf7e312c103d299006a6a2a358c4135f63feb41a2e08",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/fan/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/fan/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "3847cf869806ef54d8df586fea8862e60cdcf70de9f90598cd1f3417be3b2aef",
"format": 1
},
{
@@ -5034,17 +5097,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/firmware/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/firmware/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b86b895de8e9be2509eeaaef22b3e120df8c73da0de26a46276ffe96af33e44c",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/firmware/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/firmware/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "b9077637b7c957f46e1351f3e3f4e16e925c81a62e3381dd24169a3f0a7b4079",
"format": 1
},
{
@@ -5055,17 +5118,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/hostnic/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/hostnic/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2d233cea48c1048fe9ac74653a6f05a9e471b178adcc448612d3e0ee44ac7f58",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/hostnic/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/hostnic/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "e75317b8512ae920138e3c7241d6b4582626d6e236b36128733b92f856543c53",
"format": 1
},
{
@@ -5076,17 +5139,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/idrac/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/idrac/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4d5ca5f3141059ad14d844544b22e52ebaf2ab9d44fcb797d940b92dadfb3737",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/idrac/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/idrac/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "1793fd4df60558619a9c94aef7599243482429d81c8a28e776b91f850221b59a",
"format": 1
},
{
@@ -5097,17 +5160,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/license/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/license/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "db2cddb39bc38b89e5db58beda357a60f7d4c5cae9ca6662ab0d42fd1136396c",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/license/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/license/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "555ad87a256a73804d979ffca0fb14349aa5ce521c463fc7daa32d2a6d394a4d",
"format": 1
},
{
@@ -5118,17 +5181,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/memory/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/memory/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30e86739e7e1e1e18877223dbe17deca255fad88c9d955da03693161aaec7498",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/memory/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/memory/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "0a937be08af8078010259968d6dc6ef8836ed89caea61e997db31fec54b7f7b5",
"format": 1
},
{
@@ -5139,17 +5202,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/negative/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/negative/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "71f132a153b37c0facdb202a3134776049908e882a419fd7142a3fb9f01a185a",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/negative/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/negative/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "abcc6362b778c7dd7c367130c0f52564cb65a37a314aa41817ae19252f020ff7",
"format": 1
},
{
@@ -5160,17 +5223,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/nic/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/nic/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "896dc582912c1c53fed8e72bb323f260616d2dfc79a2ed43fbd5bccad815c297",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/nic/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/nic/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "88018366757580a7dd6c975481cf2098d8e6add7a9400aae149886c98cec2241",
"format": 1
},
{
@@ -5181,17 +5244,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/passensor/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/passensor/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cf90cf12505b3178a5fd41ebc0b9288aab73b841ec7e7ccd759247625766c624",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/passensor/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/passensor/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "85eac7dc208c39b391e4f286622829eb99c1f1331cd575f808b374512aed978e",
"format": 1
},
{
@@ -5202,17 +5265,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/pciedevice/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/pciedevice/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5df2dbc3b20bf098c08428c4dc85b79ecb447431305bcdf35b26e8320af87a11",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/pciedevice/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/pciedevice/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "5c0c515a34feab6e6b68e051513ac03a3ac94d3cc8cba176aaed27996ad0287e",
"format": 1
},
{
@@ -5223,17 +5286,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/physicaldisk/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/physicaldisk/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a3d8d5bdceb4660aae165f96fd61a3b0361d0118e3db38f45fe89e10d7646843",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/physicaldisk/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/physicaldisk/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "f842fce3a08c9578c56eb1bea950a9f2aef3952b50bb7ebcc55992f0ff089004",
"format": 1
},
{
@@ -5244,17 +5307,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/powersupply/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/powersupply/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4593adf60c90356bc9aa47f91bea16c718884b95a2ce860608995727c3645bb",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/powersupply/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/powersupply/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "b887541028b49abb21f65fbdcf0505d0d9e06681956366b31119264422c2b155",
"format": 1
},
{
@@ -5265,17 +5328,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/secureboot/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/secureboot/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6e792336231dcfa4207c118ba29d754f4cf4cc5a1beca44ed9d7188f4367e85",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/secureboot/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/secureboot/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "90a6628784d8f22ff412233df90b45f4ca670001c322ad02347933ebaac6a04c",
"format": 1
},
{
@@ -5286,17 +5349,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsbattery/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/sensorsbattery/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7b784dcb655f275d30c967cb253e5b3ffe09f4a3f3ddd3d5fbc1166133e962dd",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsbattery/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/sensorsbattery/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "678657b674bd8d79ae67af9a8ebca26b47e40fcaf6f958e66a15e1517b6b3cdb",
"format": 1
},
{
@@ -5307,17 +5370,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsintrusion/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/sensorsintrusion/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3c8cbcd170c5cc941748c35b35206c8b55d2371b663d068ebab44330a758cba",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsintrusion/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/sensorsintrusion/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "87970ed0a6773bd5ab9c8a78c1555ec0f61228282b314573c33bd30da7102a8d",
"format": 1
},
{
@@ -5328,17 +5391,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsvoltage/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/sensorsvoltage/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "72f0734accc6e0116d5cc038683efd3661b86e8cce47590edec9b09e62613aab",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsvoltage/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/sensorsvoltage/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "1dffcb2b8b2a5fb2b557d0961eaee2b1cbbd94f1f2aa26d998ad06fe6f314f0b",
"format": 1
},
{
@@ -5349,17 +5412,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/systemmetrics/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/systemmetrics/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "299c5b02f0b782f856fe72ab5d2a7e30d81cacafddb4abf843c6e2e8db42af29",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/systemmetrics/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/systemmetrics/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "54d9f77025d049c9b6572899b5bccc51968e0183e840768d2e3f3c0521aa7ce2",
"format": 1
},
{
@@ -5370,17 +5433,17 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/virtualdisk/converge.yml",
+ "name": "roles/idrac_gather_facts/molecule/virtualdisk/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3a38bd071d17033ff2eb821b827f460c2a3a6d2ae415e6fef83ac523a1c74afe",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/virtualdisk/molecule.yml",
+ "name": "roles/idrac_gather_facts/molecule/virtualdisk/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "d7f64f87068ebc765d1151c2994688505ddf9d29b5a49749565d813a7fd71da9",
"format": 1
},
{
@@ -5587,13 +5650,6 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/backplane_assert.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "814795a1a735b378e46304827d8315436b94113bc2a229b35ed354886db6b927",
- "format": 1
- },
- {
"name": "roles/idrac_gather_facts/tests/asserts/controller_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -5699,13 +5755,6 @@
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/physicaldisk_assert.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "611944e5b3bed5181b59b6cd940e8d6673076bff6209db3c7a4cd9d12608b984",
- "format": 1
- },
- {
"name": "roles/idrac_gather_facts/tests/asserts/powersupply_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -5860,24 +5909,17 @@
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/cifs_share/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f20c76524c38d431acea8853cd59b3deacc7a933da50ceb12ee5c9686608f686",
- "format": 1
- },
- {
"name": "roles/idrac_import_server_config_profile/molecule/cifs_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/cifs_share/prepare.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/cifs_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
+ "chksum_sha256": "2deadcf076f47e066b749446ece0948b1fc0be171ab883f67a32c64de9a9a7bd",
"format": 1
},
{
@@ -5888,17 +5930,17 @@
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/default/converge.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "76dc6ec3bbc45acaa1363e8af18098ebdf641fdb97460659095a38744ff11f0c",
+ "chksum_sha256": "f3f5fdbd0243581c13e09c4a9347c3197712b89de7ccf5c19bf040002a8e0967",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/default/molecule.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3f5fdbd0243581c13e09c4a9347c3197712b89de7ccf5c19bf040002a8e0967",
+ "chksum_sha256": "2f2a440f3da11b5d518f74f7fe44fb76709607382cb11bd47fd424767eb8e7da",
"format": 1
},
{
@@ -5909,24 +5951,17 @@
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2c3ab569ca359ac2fa8acfa7a3d3b77e5c1e44b8fee6a13b87888f32abe8ac0b",
- "format": 1
- },
- {
"name": "roles/idrac_import_server_config_profile/molecule/http_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share/prepare.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/http_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
+ "chksum_sha256": "05ee90c1a034c6a7c4f02120c05554526172311af76c5a61767cab3705b114c0",
"format": 1
},
{
@@ -5937,24 +5972,17 @@
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2bc993e0fb0f926cb49ad73e9afdb41ff60906c02739ede68fc1c817070577a7",
- "format": 1
- },
- {
"name": "roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/prepare.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
+ "chksum_sha256": "4a9673edc4a062a85a757483b1587a1ebea5ec8545b6ec20cdf861afab9b38e5",
"format": 1
},
{
@@ -5965,13 +5993,6 @@
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0e24bb4df02d6d331effe0c6bc95db3c0d7b38776ccf64b0bcf680cb3cee453d",
- "format": 1
- },
- {
"name": "roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -5979,10 +6000,10 @@
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/prepare.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
+ "chksum_sha256": "c6440bd7d1c2e2fe63da11cf6345307f8d08d2ff2287a3007e3cb99eea47d7c4",
"format": 1
},
{
@@ -5993,24 +6014,17 @@
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/https_share/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b03af0b95e571adc26e9e17d74cbaa0d0ad65e5764b24c0c063ffa073bb70408",
- "format": 1
- },
- {
"name": "roles/idrac_import_server_config_profile/molecule/https_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/https_share/prepare.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/https_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
+ "chksum_sha256": "70c1949f3736465e786717cb6a2311c4dfcc92861212161f8957ca6c932e5d6c",
"format": 1
},
{
@@ -6021,24 +6035,17 @@
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "671c0272e561dfd3c8a4cf4b62dc1a0b2fc3212d389be919bb50d2dd842fb120",
- "format": 1
- },
- {
"name": "roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/prepare.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
+ "chksum_sha256": "2f555e872a266977d5f0acb356ed20321dcf984564e9471d2fdc29b841d5a120",
"format": 1
},
{
@@ -6049,24 +6056,24 @@
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json/converge.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "244a0b6dc7b307d86c9fdae8c9e4b4af60f67691ada7486a9313c9404397d25f",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json/molecule.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json/prepare.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
+ "chksum_sha256": "7e73dc08b0b670f9047ad969ec153670bae08fb4baf7db258216251c4160c083",
"format": 1
},
{
@@ -6077,24 +6084,24 @@
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml/converge.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e2a677c8f26d168e7bf5b5e5efa07f121f70750d82e3d9bc1e384690ea55b7c",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml/molecule.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml/prepare.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
+ "chksum_sha256": "57802a2bfa00428e20c2baf45add16b9cb643b46bb5d0c325190699432473e86",
"format": 1
},
{
@@ -6105,24 +6112,17 @@
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_multiple_target/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f76858a1818cf9026b33f774c00006dab25c933ca94d07e951e7d8bf7d225e92",
- "format": 1
- },
- {
"name": "roles/idrac_import_server_config_profile/molecule/import_multiple_target/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_multiple_target/prepare.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_multiple_target/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
+ "chksum_sha256": "e3fb9baf2fda4da08a609e07a9c56bca74569c01a0081e3630e9635295b31f0a",
"format": 1
},
{
@@ -6133,24 +6133,17 @@
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/nfs_share/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "762ff43cdf20d5c5107f8aeb15af3678adf24b75793fe3a15c886880f16e09e7",
- "format": 1
- },
- {
"name": "roles/idrac_import_server_config_profile/molecule/nfs_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/nfs_share/prepare.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/nfs_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
+ "chksum_sha256": "c825c7c600c1dccac10930371fb8da29b38df7dfee460ab885266bf6e3006bd1",
"format": 1
},
{
@@ -6168,10 +6161,38 @@
"format": 1
},
{
+ "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/nic_helper.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f247488b64a9ccaffe3f0240f0cf3a6e527b3ac952a786bcc715c436b397a00d",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/raid_helper.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6706025009360630843898df27085681a613507370fb0bb91a2c40fd03a2e8c7",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/cleanup.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3985d5b2ca2f11661982e372f5c9e677144651dd2f7f167efd7e0e4b3d2c9231",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/export.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8a4f3a78e9d259c668ae748e224e36007b91372281024e1f514ad6aaaae72606",
+ "format": 1
+ },
+ {
"name": "roles/idrac_import_server_config_profile/molecule/resources/tests/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "043e0949892dddbbf4f51d9b5146cf2fba7777d102dc55c3f5c5a9d2a7fbd73e",
+ "chksum_sha256": "b48f7002029c826a3402b360aaabe1a9301d34e249a49718aef9454d6dc9a557",
"format": 1
},
{
@@ -6301,17 +6322,17 @@
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/clear_job_queue/converge.yml",
+ "name": "roles/idrac_job_queue/molecule/clear_job_queue/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a6230b7be5eb08101f84d73735097bc06c18160388f47bb8fcaafc602eb70d5e",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/clear_job_queue/molecule.yml",
+ "name": "roles/idrac_job_queue/molecule/clear_job_queue/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "f040659df832a82b82f060fe657dcefd0e06f397600604b86b65ba6964e9e338",
"format": 1
},
{
@@ -6322,17 +6343,17 @@
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/default/converge.yml",
+ "name": "roles/idrac_job_queue/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d81bc68e8c8b2d39739998d44f3e799e80e6025dc671c773664a0e1c475066fb",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/default/molecule.yml",
+ "name": "roles/idrac_job_queue/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "0424782236b3a6a800a4a192f73f528f65b87c135f3a53203547d640e6cde330",
"format": 1
},
{
@@ -6343,17 +6364,17 @@
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/delete_job/converge.yml",
+ "name": "roles/idrac_job_queue/molecule/delete_job/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6821a330491cf5c51f659fefc17c1151818c5f8fbd9595df438aab1f149c7557",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/delete_job/molecule.yml",
+ "name": "roles/idrac_job_queue/molecule/delete_job/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "f17ecfe1a83f4d34185ba19886855028b67ea7665f4a24b712ad741ee6d1e0fc",
"format": 1
},
{
@@ -7862,17 +7883,17 @@
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/default/converge.yml",
+ "name": "roles/redfish_storage_volume/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ee862d62b26aee6030c4da1ca247d8d8e8b26f53ad6c388877fa0cb68d881c74",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/default/molecule.yml",
+ "name": "roles/redfish_storage_volume/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "171f69d2607d6cf5d8088b2e7a9231406cbf90c2bf74d40e8997aced0f0f08ce",
"format": 1
},
{
@@ -7883,17 +7904,31 @@
"format": 1
},
{
+ "name": "roles/redfish_storage_volume/molecule/initialization/molecule.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "format": 1
+ },
+ {
"name": "roles/redfish_storage_volume/molecule/initialization/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d44b362192c52c569bb6fbea45fb8a09be10b9e44c6f48f8df4eec18e7a4905a",
+ "chksum_sha256": "d2204316b3f71e879600ede864aaa0e5b53ac0b5cc5422b4766a6e789a0d9dfd",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/initialization/molecule.yml",
+ "name": "roles/redfish_storage_volume/molecule/__create_virtual_drive.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "5ff523743f2c992be084d96d3e0bebf811c50fd09bad3e9b3bdf346c093a3914",
+ "format": 1
+ },
+ {
+ "name": "roles/redfish_storage_volume/molecule/__job_track.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4ac8080105eab85b5b077ac94669ff8fc03600675b5d86258ee27ca26e6ceebd",
"format": 1
},
{
@@ -7946,6 +7981,342 @@
"format": 1
},
{
+ "name": "roles/idrac_user",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "369f4ae5faa868c85788d6df11bf9446ae9fb5ed5d3c9f2808ed9e84cbc06c1a",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/defaults/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6ab3a9e6149fca242c0436f5630a97a2063f6232f42a9df4482b3977471d0be3",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/handlers",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/handlers/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "00a6e4df8f9fe8c7b2be5ed666c8a779836b8bdd3a57736d81729e080d52d27b",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/meta/argument_specs.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be066c11750b38f72617e0d31f8c107f945bb65448118a94bdb923a80babb2c0",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "36cbc64418daec070e7d63dcecf4ec4e7be341ef8a7661b9c5487e8903a48c2c",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152120",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152120/converge.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "600419dfff67b4f0e13aceef503059c3573db9322dc487db74440b6489e93177",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152120/molecule.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152146",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152146/converge.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c4fbf2ba3ab40583ae88f9b5b63744498b858acb41a33bb75a882f0974b47b3d",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152146/molecule.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152147",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152147/converge.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1096107ae7f0281cbc4f43477562d8c9709ceb968ee5058b5880afb035cfac59",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152147/molecule.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "13fc1cf138058346df47e0863e038de0d6a3fe59cf9ce35d2af2d872f237444c",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152148",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152148/converge.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "38da55bda10bc1de8eb098d795abe2588f27159b217e6c3157c5396273916e02",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152148/molecule.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152148/prepare.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "06c0f8c4fb2670cc81d9f1cfbd7d0664c88249ec23d5d0c35bc4177da0fd296c",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152149",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152149/converge.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "20d79a5f4a9dafe606d8b6cde2accf782af12b9f1781f5df0682b93fcdf16ddf",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152149/molecule.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152149/prepare.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6018b3098204f927d9412e33103fb6cc93847ba09cd25a01df104efe45037753",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152150",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152150/converge.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6220849df8cee2230b06defb296fc7569be79bff4e9f2e53dfc4c0ff4f108708",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152150/molecule.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/TC-152150/prepare.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f5f0a568105643945bdd6b58bf10914d1a66c46130e7ee8f4ffa7b70f0c387c7",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/default",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/default/converge.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6b58bbd18e562ced8fbaccd5a23d479b69b4bec8659d246d62de5c135b291dcc",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/default/molecule.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "13fc1cf138058346df47e0863e038de0d6a3fe59cf9ce35d2af2d872f237444c",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/resources",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/resources/idrac_user",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/resources/idrac_user/cleanup.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "291fe02f4f127b148c6bc9a2325d06c7e6497f4c853152a7fc42f3c5ccf479e5",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/molecule/resources/idrac_user/get_user_info.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6e080f7169503086fc1e8fc4e4fa95e56a0dd39403fe183f086ad9770ded41e2",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/tasks/absent.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6fcebf5c4162fe906c18e2babb948835808726952abe198cc14caaaee1454546",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/tasks/get_user.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7250bb2b6723ad29148ec605badbcc828f6656088ceaa7f4ad02b46dc4aa25dc",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cdf0aed407cb791aaabd992919f0d1449b6199b9fe04fe1ccdee43c7e8a3ef67",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/tasks/present.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "31094d92ad1e121dfb3f27d6dc0e8fdf471ee01b2168ba2cfbd66078dd0054c0",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/tests/inventory",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/tests/test.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "594b1a865eae9e2fc2a8f2f9daf0872a5c4a6b697af7167eadfb52df1d5009be",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_user/vars/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "71a5ee7c5ff8f63dcdb4c815fd75a8694be391c701903e8297b8e3eecf83d12d",
+ "format": 1
+ },
+ {
"name": "tests",
"ftype": "dir",
"chksum_type": null,
@@ -8026,21 +8397,28 @@
"name": "tests/unit/plugins/module_utils/test_idrac_redfish.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "06eee949e14418d09370c421d7d50cb5a92f19a1a49c8ee654ade95220350869",
+ "chksum_sha256": "789188c5bb56dbf5925ed648e6024de2c575580961f9ea94b48817d7edb98901",
"format": 1
},
{
"name": "tests/unit/plugins/module_utils/test_ome.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "00f667133dfcb6305f2ecfde0900211733c873a35c6acb8f680adc4dbfa45a5a",
+ "chksum_sha256": "abdff09b79463b5bab943ab1eed8fe083290c62d5cd3f37e344cc72735687f6b",
"format": 1
},
{
"name": "tests/unit/plugins/module_utils/test_redfish.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d9bfc8945d1ed9d6f162a9146c9316d859615eb9af5aa162acd850be16be262d",
+ "chksum_sha256": "ea6f698bd5328030a3260bf0bdd354a268e67e3b1f2ad8a51a9eed59ad36218e",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/plugins/module_utils/test_session_utils.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b8279ca95da6d91825fc327a48c0a90cef7e714c15d9cac0a98f75dd823191ec",
"format": 1
},
{
@@ -8215,7 +8593,7 @@
"name": "tests/unit/plugins/modules/test_idrac_reset.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "755381915a5d3433313f42e08e5169624a522675a8bf1d147aa9acf502c99b74",
+ "chksum_sha256": "0760350d332018dc32da2f005b39f7ae94983a2061df43084332ba6c78369eb6",
"format": 1
},
{
@@ -8250,7 +8628,7 @@
"name": "tests/unit/plugins/modules/test_idrac_user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2d7fbcf3629f88219471cb399b853bc28ea397291e7eafd7f67f71bbca7b62c1",
+ "chksum_sha256": "4284c52644fd69a018da7f858f8eae06c9d3f680517d748519820cda2780a5da",
"format": 1
},
{
@@ -8334,7 +8712,7 @@
"name": "tests/unit/plugins/modules/test_ome_application_console_preferences.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "265fd707c0cf99eda2b9f20feaccf3328d1f78c1ae45e196f8ce88b9b4e1c726",
+ "chksum_sha256": "23a93ab54d65b2e1d87c70f0ffedea617f124daad04b292a91d487d5cf8d25f8",
"format": 1
},
{
@@ -8418,21 +8796,21 @@
"name": "tests/unit/plugins/modules/test_ome_device_local_access_configuration.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4de5185ae43025acd83012bd9eaccf2c05d9b94d00bd985483f04b15ee502bbb",
+ "chksum_sha256": "f83c9a95759be04419109a20d5a49512a7fab40c5d655aa3057d0a0e44bad861",
"format": 1
},
{
"name": "tests/unit/plugins/modules/test_ome_device_location.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5f624abfb88ee235fd29dbb68f5ddcc77463085603e06b5f9bbb03c7471c5b32",
+ "chksum_sha256": "cbd189a7cd877069342d9d55a3abedde08cc7c8982aa3d1387f256baf69c5ade",
"format": 1
},
{
"name": "tests/unit/plugins/modules/test_ome_device_mgmt_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4d419a694ec5d1ba0fdebc86f1dc68aa7ee25c3a2cccb787e57d003741dadf66",
+ "chksum_sha256": "82e5e3b741916fe2b28da752bf12995c0b38e3ff40af125204d67a14e04b6f12",
"format": 1
},
{
@@ -8446,21 +8824,21 @@
"name": "tests/unit/plugins/modules/test_ome_device_power_settings.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8fe2d243278b33bf748001911a1be0704134fafe7bb59f66b6e5485cca2fe12a",
+ "chksum_sha256": "5c305977b799fc5acacb5a13a1bb1b724b874c15c47a51402921b4b9edda6a4c",
"format": 1
},
{
"name": "tests/unit/plugins/modules/test_ome_device_quick_deploy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e0acd2b6ae2cbaf52be36bfe9ba2886c4116c79fab354111c65714eedcef47c2",
+ "chksum_sha256": "9635a328e7d8b444de75b441e3fd8f15c7bbb407ea4b8f4ee7c970215596b695",
"format": 1
},
{
"name": "tests/unit/plugins/modules/test_ome_devices.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c1d265161a08069cf7e386ab5d56d4ba62dbf4e501da32b69d05bd31450c349e",
+ "chksum_sha256": "2db61429200f99069795d059690c4fdac00d946bad828827b8f8d6a686ea3da8",
"format": 1
},
{
@@ -8702,17 +9080,38 @@
"format": 1
},
{
+ "name": "tests/unit/plugins/modules/utils.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6dd69e26e1abab9e11a3c0d8e6212b37d8619036e394b351ccc99e480976da28",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/plugins/modules/test_idrac_diagnostics.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8dc7291aa625412b4b452c5e7931cb7a62e1f41e2f32fa84679242398fcd5d50",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/plugins/modules/test_idrac_storage_volume.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bd47447b5e83792a73cab14880da8ba3dc5300f1d3e5b2e46f199d5de323826b",
+ "format": 1
+ },
+ {
"name": "tests/unit/plugins/modules/test_redfish_storage_volume.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "54ccd9a59a3da074cbc61424bac980ccbe07ba4b01b2cb4116523c42f339fb9d",
+ "chksum_sha256": "8673a7d654d7eebe6b54ace65b26922d3c23c8f35563cb27ebb8b15cd68899a2",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/utils.py",
+ "name": "tests/unit/plugins/modules/test_idrac_session.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6dd69e26e1abab9e11a3c0d8e6212b37d8619036e394b351ccc99e480976da28",
+ "chksum_sha256": "b2417f4e5552f889987522e92e20aa0d2bf0d042022d157b71745df9549e4a16",
"format": 1
},
{
@@ -8721,6 +9120,34 @@
"chksum_type": "sha256",
"chksum_sha256": "5ec603ab1d2b3071743853324fd0db34d886f78f1543c1fa700ad1c904a6fd25",
"format": 1
+ },
+ {
+ "name": ".ansible-lint-ignore",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9259c9753c32d36dfca9f4c9858b16b12e03ab2cd5eeece7f53ee9ad7bb2b2aa",
+ "format": 1
+ },
+ {
+ "name": "requirements.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6f0f045a08a6f73f1903309c1636aefca64bad5073cf5e1b3d092f0f4fc1806b",
+ "format": 1
+ },
+ {
+ "name": "CHANGELOG.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c9d74f37191657712d94c31aa6fdddebc11c869e8c6c77817fd005b71bce07dd",
+ "format": 1
+ },
+ {
+ "name": "README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b4a32d700abcd8da5d5122d404d99655a440ea072d77a6c84b5cc02382f4887b",
+ "format": 1
}
],
"format": 1
diff --git a/ansible_collections/dellemc/openmanage/MANIFEST.json b/ansible_collections/dellemc/openmanage/MANIFEST.json
index bf8b402bc..f1bda6107 100644
--- a/ansible_collections/dellemc/openmanage/MANIFEST.json
+++ b/ansible_collections/dellemc/openmanage/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "dellemc",
"name": "openmanage",
- "version": "8.7.0",
+ "version": "9.2.0",
"authors": [
"Jagadeesh N V <Jagadeesh.N.V@Dell.com>",
"Felix Stephen <Felix.S@Dell.com>",
@@ -46,7 +46,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "889b7354ab86fd7d07cb93c6efa113b3f470fb53c397a27b5b464adaf803e17e",
+ "chksum_sha256": "3c38b92a827649678ff0be97765d225e8df0902c89362e974b355d0a24549830",
"format": 1
},
"format": 1
diff --git a/ansible_collections/dellemc/openmanage/README.md b/ansible_collections/dellemc/openmanage/README.md
index b9ccc4cbb..5223d3612 100644
--- a/ansible_collections/dellemc/openmanage/README.md
+++ b/ansible_collections/dellemc/openmanage/README.md
@@ -5,6 +5,7 @@
[![Python version](https://img.shields.io/badge/python-3.9.6+-blue.svg)](https://www.python.org/downloads/)
[![Ansible version](https://img.shields.io/badge/ansible-2.15.6+-blue.svg)](https://pypi.org/project/ansible/)
[![GitHub release (latest by date including pre-releases)](https://img.shields.io/github/v/release/dell/dellemc-openmanage-ansible-modules?include_prereleases&label=latest&style=flat-square)](https://github.com/dell/dellemc-openmanage-ansible-modules/releases)
+[![codecov](https://codecov.io/gh/dell/dellemc-openmanage-ansible-modules/branch/collections/graph/badge.svg)](https://app.codecov.io/gh/dell/dellemc-openmanage-ansible-modules)
Dell OpenManage Ansible Modules allows data center and IT administrators to use RedHat Ansible to automate and orchestrate the configuration, deployment, and update of Dell PowerEdge Servers and modular infrastructure by leveraging the management automation capabilities in-built into the Integrated Dell Remote Access Controller (iDRAC), OpenManage Enterprise (OME) and OpenManage Enterprise Modular (OMEM).
@@ -20,16 +21,17 @@ OpenManage Ansible Modules simplifies and automates provisioning, deployment, an
* [Security](https://github.com/dell/dellemc-openmanage-ansible-modules/blob/collections/docs/SECURITY.md)
* [Documentation](https://github.com/dell/dellemc-openmanage-ansible-modules/blob/collections/docs/DOCUMENTATION.md)
* [Execution Environment](https://github.com/dell/dellemc-openmanage-ansible-modules/blob/collections/docs/EXECUTION_ENVIRONMENT.md)
+ * [Attribution](https://github.com/dell/dellemc-openmanage-ansible-modules/blob/collections/docs/ATTRIBUTION.md)
* [Additional Information](https://github.com/dell/dellemc-openmanage-ansible-modules/blob/collections/docs/ADDITIONAL_INFORMATION.md)
## Supported Platforms
- * iDRAC8 based Dell PowerEdge Servers with firmware versions 2.84.84.84 and above.
+ * iDRAC8 based Dell PowerEdge Servers with firmware versions 2.85.85.85 and above.
* iDRAC9 based Dell PowerEdge Servers with firmware versions 6.10.80.00 and above.
- * Dell OpenManage Enterprise versions 3.10.2 and 4.0.0.
+ * Dell OpenManage Enterprise versions 4.0.0 and 4.0.1.
* Dell OpenManage Enterprise Modular versions 2.10.10 and above.
## Prerequisites
- * [Ansible Core >= 2.16.2 and 2.15.8](https://github.com/ansible/ansible)
+ * [Ansible Core >= 2.16.4 and 2.15.9](https://github.com/ansible/ansible)
* Python >= 3.9.6
* To run the iDRAC modules, install OpenManage Python Software Development Kit (OMSDK)
using either ```pip install omsdk --upgrade``` or ```pip install -r requirements.txt```.
@@ -37,7 +39,7 @@ OpenManage Ansible Modules simplifies and automates provisioning, deployment, an
* Operating System
* Red Hat Enterprise Linux (RHEL) 9.3 and 8.9
* SUSE Linux Enterprise Server (SLES) 15 SP5 and 15 SP4
- * Ubuntu 22.04.3 and 22.04.2
+ * Ubuntu 22.04.4 and 22.04.3
## Installation
diff --git a/ansible_collections/dellemc/openmanage/changelogs/changelog.yaml b/ansible_collections/dellemc/openmanage/changelogs/changelog.yaml
index 63ab4cdee..391fa597d 100644
--- a/ansible_collections/dellemc/openmanage/changelogs/changelog.yaml
+++ b/ansible_collections/dellemc/openmanage/changelogs/changelog.yaml
@@ -1729,3 +1729,108 @@ releases:
name: idrac_license
namespace: ''
release_date: '2024-01-31'
+
+ 9.0.0:
+ changes:
+ release_summary: '- idrac_diagnostics module is added to run and export diagnostics on iDRAC.
+
+ - idrac_user role is added to manage local users of iDRAC.'
+ bugfixes:
+ - ome_device_network_services - Issue(212681) - The module does not provide a proper
+ error message if unsupported values are provided for the following parameters-
+ port_number, community_name, max_sessions, max_auth_retries, and idle_timeout.
+ - ome_device_power_settings - Issue(212679) - The module displays the following
+ message if the value provided for the parameter ``power_cap`` is not within
+ the supported range of 0 to 32767, ``Unable to complete the request because
+ PowerCap does not exist or is not applicable for the resource URI.``
+ - idrac_network_attributes - Issue(279049) - If unsupported values are provided
+ for the parameter ``ome_network_attributes``, then this module does not provide
+ a correct error message.
+
+ major_changes:
+ - idrac_user - This role is introduced to manage local users of iDRAC.
+ - idrac_diagnostics - The module is introduced to run and export diagnostics on iDRAC.
+ known_issues:
+ - idrac_diagnostics - Issue(285322) - This module doesn't support export of diagnostics
+ file to HTTP and HTTPS share via SOCKS proxy.
+ - idrac_firmware - Issue(279282) - This module does not support firmware update using
+ HTTP, HTTPS, and FTP shares with authentication on iDRAC8.
+ - ome_diagnostics - Issue(279193) - Export of SupportAssist collection logs to the
+ share location fails on OME version 4.0.0.
+ - ome_smart_fabric_uplink - Issue(186024) - The module supported by OpenManage
+ Enterprise Modular, however it does not allow the creation of multiple uplinks
+ of the same name. If an uplink is created using the same name as an existing
+ uplink, then the existing uplink is modified.
+ modules:
+ - description:
+ This module allows to run and export diagnostics on iDRAC.
+ name: idrac_diagnostics
+ namespace: ''
+ objects:
+ role:
+ - description: Role to manage local users of iDRAC.
+ name: idrac_user
+ namespace: null
+ release_date: '2024-02-29'
+ 9.1.0:
+ changes:
+ release_summary: '- ``redfish_storage_volume`` is enhanced to support iDRAC8.
+
+ - ``dellemc_idrac_storage_module`` is deprecated and replaced with ``idrac_storage_volume``.'
+ bugfixes:
+ - Added support for RAID creation using NVMe disks.(https://github.com/dell/dellemc-openmanage-ansible-modules/issues/635)
+ - redfish_storage_volume is enhanced to support iDRAC8.(https://github.com/dell/dellemc-openmanage-ansible-modules/issues/625)
+ deprecated_features:
+ - The ``dellemc_idrac_storage_volume`` module is deprecated and replaced with
+ ``idrac_storage_volume``.
+ minor_changes:
+ - redfish_storage_volume - This module is enhanced to support iDRAC8.
+ known_issues:
+ - idrac_storage_volume - Issue(290766) - The module will report success instead of showing
+ failure for new virtual creation on the BOSS-N1 controller if a virtual disk is already
+ present on the same controller.
+ - idrac_diagnostics - Issue(285322) - This module doesn't support export of diagnostics
+ file to HTTP and HTTPS share via SOCKS proxy.
+ - idrac_firmware - Issue(279282) - This module does not support firmware update using
+ HTTP, HTTPS, and FTP shares with authentication on iDRAC8.
+ - ome_diagnostics - Issue(279193) - Export of SupportAssist collection logs to the
+ share location fails on OME version 4.0.0.
+ - ome_smart_fabric_uplink - Issue(186024) - The module supported by OpenManage
+ Enterprise Modular, however it does not allow the creation of multiple uplinks
+ of the same name. If an uplink is created using the same name as an existing
+ uplink, then the existing uplink is modified.
+ modules:
+ - description:
+ Configures the RAID configuration attributes.
+ name: idrac_storage_volume
+ namespace: ''
+ release_date: '2024-03-29'
+ 9.2.0:
+ changes:
+ release_summary: '- The idrac_session module is added to allow you to create and delete the sessions on iDRAC.
+
+ - The idrac_reset module is enhanced to allow you to reset the iDRAC to factory default settings.'
+ major_changes:
+ - idrac_session - This module allows you to create and delete the sessions on iDRAC.
+ minor_changes:
+ - idrac_reset - This module allows you to reset the iDRAC to factory default settings.
+ known_issues:
+ - idrac_storage_volume - Issue(290766) - The module will report success instead of showing
+ failure for new virtual creation on the BOSS-N1 controller if a virtual disk is already
+ present on the same controller.
+ - idrac_diagnostics - Issue(285322) - This module doesn't support export of diagnostics
+ file to HTTP and HTTPS share via SOCKS proxy.
+ - idrac_firmware - Issue(279282) - This module does not support firmware update using
+ HTTP, HTTPS, and FTP shares with authentication on iDRAC8.
+ - ome_diagnostics - Issue(279193) - Export of SupportAssist collection logs to the
+ share location fails on OME version 4.0.0.
+ - ome_smart_fabric_uplink - Issue(186024) - The module supported by OpenManage
+ Enterprise Modular, however it does not allow the creation of multiple uplinks
+ of the same name. If an uplink is created using the same name as an existing
+ uplink, then the existing uplink is modified.
+ modules:
+ - description:
+ Allows you to create and delete the sessions on iDRAC.
+ name: idrac_session
+ namespace: ''
+ release_date: '2024-04-30'
diff --git a/ansible_collections/dellemc/openmanage/docs/ATTRIBUTION.md b/ansible_collections/dellemc/openmanage/docs/ATTRIBUTION.md
new file mode 100644
index 000000000..969a2d706
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/docs/ATTRIBUTION.md
@@ -0,0 +1,27 @@
+OpenSource Licenses for Dell OpenManage Ansible Modules
+=======================================================================
+
+***
+Package: xorriso
+Version: v1.5.6
+Copyright: Copyright © 2008 - 2022 Thomas Schmitt.
+License: [GPL version 3 or later](https://www.gnu.org/software/xorriso/)
+
+***
+Package: syslinux
+Version: v6.04
+Copyright: Copyright 1994-2011 H. Peter Anvin et al - All Rights Reserved
+License: [GPL-2.0 license or later](https://repo.or.cz/syslinux.git)
+
+***
+Package: isomd5sum
+Version: v1.2.3
+License: [GPL-2.0 license](https://github.com/rhinstaller/isomd5sum)
+
+***
+Package: wget
+Version: v1.21.4
+Copyright: Copyright © 2017 Free Software Foundation, Inc.
+License: [GNU General Public License](https://www.gnu.org/software/wget/)
+
+*** \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/docs/EXECUTION_ENVIRONMENT.md b/ansible_collections/dellemc/openmanage/docs/EXECUTION_ENVIRONMENT.md
index c5f556a72..fc5d0fff7 100644
--- a/ansible_collections/dellemc/openmanage/docs/EXECUTION_ENVIRONMENT.md
+++ b/ansible_collections/dellemc/openmanage/docs/EXECUTION_ENVIRONMENT.md
@@ -58,7 +58,6 @@ Build a image with the required Ansible collections and libraries, and then uplo
```yaml
omsdk
netaddr>=0.7.19
- jmespath
```
Note: The content of the *requirements.txt* can be found [here](https://github.com/dell/dellemc-openmanage-ansible-modules/blob/collections/requirements.txt)
diff --git a/ansible_collections/dellemc/openmanage/docs/README.md b/ansible_collections/dellemc/openmanage/docs/README.md
index 4f39dbc96..58c1d50fe 100644
--- a/ansible_collections/dellemc/openmanage/docs/README.md
+++ b/ansible_collections/dellemc/openmanage/docs/README.md
@@ -10,94 +10,103 @@ You may obtain a copy of the License at
# OpenManage Ansible Modules Documentation
### iDRAC Modules
-- [dellemc_configure_idrac_eventing](modules/dellemc_configure_idrac_eventing.rst)
-- [dellemc_configure_idrac_services](modules/dellemc_configure_idrac_services.rst)
-- [dellemc_idrac_lc_attributes](modules/dellemc_idrac_lc_attributes.rst)
-- [dellemc_idrac_storage_volume](modules/dellemc_idrac_storage_volume.rst)
-- [dellemc_system_lockdown_mode](modules/dellemc_system_lockdown_mode.rst)
-- [idrac_attributes](modules/idrac_attributes.rst)
-- [idrac_bios](modules/idrac_bios.rst)
-- [idrac_boot](modules/idrac_boot.rst)
-- [idrac_certificates](modules/idrac_certificates.rst)
-- [idrac_firmware](modules/idrac_firmware.rst)
-- [idrac_firmware_info](modules/idrac_firmware_info.rst)
-- [idrac_lifecycle_controller_jobs](modules/idrac_lifecycle_controller_jobs.rst)
-- [idrac_lifecycle_controller_job_status_info](modules/idrac_lifecycle_controller_job_status_info.rst)
-- [idrac_lifecycle_controller_logs](modules/idrac_lifecycle_controller_logs.rst)
-- [idrac_lifecycle_controller_status_info](modules/idrac_lifecycle_controller_status_info.rst)
-- [idrac_network_attributes](modules/idrac_network_attributes.rst)
-- [idrac_network](modules/idrac_network.rst)
-- [idrac_os_deployment](modules/idrac_os_deployment.rst)
-- [idrac_redfish_storage_controller](modules/idrac_redfish_storage_controller.rst)
-- [idrac_reset](modules/idrac_reset.rst)
-- [idrac_server_config_profile](modules/idrac_server_config_profile.rst)
-- [idrac_syslog](modules/idrac_syslog.rst)
-- [idrac_system_info](modules/idrac_system_info.rst)
-- [idrac_timezone_ntp](modules/idrac_timezone_ntp.rst)
-- [idrac_user](modules/idrac_user.rst)
-- [idrac_user_info](modules/idrac_user_info.rst)
-- [idrac_virtual_media](modules/idrac_virtual_media.rst)
-- [redfish_event_subscription](modules/redfish_event_subscription.rst)
-- [redfish_firmware](modules/redfish_firmware.rst)
-- [redfish_powerstate](modules/redfish_powerstate.rst)
-- [redfish_storage_volume](modules/redfish_storage_volume.rst)
-
+| Module Name | iDRAC8 | iDRAC9 |
+| ---------------------------------------------------------------------------------------------------- | ------ | ------ |
+| [dellemc_configure_idrac_eventing](modules/dellemc_configure_idrac_eventing.rst) | ✓ | ✓ |
+| [dellemc_configure_idrac_services](modules/dellemc_configure_idrac_services.rst) | ✓ | ✓ |
+| [dellemc_idrac_lc_attributes](modules/dellemc_idrac_lc_attributes.rst) | ✓ | ✓ |
+| [dellemc_idrac_storage_volume](modules/dellemc_idrac_storage_volume.rst) | ✓ | ✓ |
+| [dellemc_system_lockdown_mode](modules/dellemc_system_lockdown_mode.rst) | ✓ | ✓ |
+| [idrac_attributes](modules/idrac_attributes.rst) | ✓ | ✓ |
+| [idrac_bios](modules/idrac_bios.rst) | ✓ | ✓ |
+| [idrac_boot](modules/idrac_boot.rst) | ✓ | ✓ |
+| [idrac_certificates](modules/idrac_certificates.rst) | ✕ | ✓ |
+| [idrac_diagnostics](modules/idrac_diagnostics.rst) | ✕ | ✓ |
+| [idrac_firmware](modules/idrac_firmware.rst) | ✓ | ✓ |
+| [idrac_firmware_info](modules/idrac_firmware_info.rst) | ✓ | ✓ |
+| [idrac_lifecycle_controller_jobs](modules/idrac_lifecycle_controller_jobs.rst) | ✓ | ✓ |
+| [idrac_lifecycle_controller_job_status_info](modules/idrac_lifecycle_controller_job_status_info.rst) | ✓ | ✓ |
+| [idrac_lifecycle_controller_logs](modules/idrac_lifecycle_controller_logs.rst) | ✓ | ✓ |
+| [idrac_lifecycle_controller_status_info](modules/idrac_lifecycle_controller_status_info.rst) | ✓ | ✓ |
+| [idrac_network_attributes](modules/idrac_network_attributes.rst) | ✓ | ✓ |
+| [idrac_network](modules/idrac_network.rst) | ✓ | ✓ |
+| [idrac_os_deployment](modules/idrac_os_deployment.rst) | ✓ | ✓ |
+| [idrac_redfish_storage_controller](modules/idrac_redfish_storage_controller.rst) | ✕ | ✓ |
+| [idrac_reset](modules/idrac_reset.rst) | ✓ | ✓ |
+| [idrac_server_config_profile](modules/idrac_server_config_profile.rst) | ✓ | ✓ |
+| [idrac_session](modules/idrac_session.rst) | ✓ | ✓ |
+| [idrac_storage_volume](modules/idrac_storage_volume.rst) | ✓ | ✓ |
+| [idrac_syslog](modules/idrac_syslog.rst) | ✓ | ✓ |
+| [idrac_system_info](modules/idrac_system_info.rst) | ✓ | ✓ |
+| [idrac_timezone_ntp](modules/idrac_timezone_ntp.rst) | ✓ | ✓ |
+| [idrac_user](modules/idrac_user.rst) | ✓ | ✓ |
+| [idrac_user_info](modules/idrac_user_info.rst) | ✓ | ✓ |
+| [idrac_virtual_media](modules/idrac_virtual_media.rst) | ✓ | ✓ |
+| [redfish_event_subscription](modules/redfish_event_subscription.rst) | ✕ | ✓ |
+| [redfish_firmware](modules/redfish_firmware.rst) | ✕ | ✓ |
+| [redfish_firmware_rollback](modules/redfish_firmware_rollback.rst) | ✓ | ✓ |
+| [redfish_powerstate](modules/redfish_powerstate.rst) | ✓ | ✓ |
+| [redfish_storage_volume](modules/redfish_storage_volume.rst) | ✓ | ✓ |
+
### OpenManage Enterprise Modules
-- [ome_active_directory](modules/ome_active_directory.rst)
-- [ome_alert_policies](modules/ome_alert_policies.rst)
-- [ome_alert_policies_message_id_info](modules/ome_alert_policies_message_id_info.rst)
-- [ome_alert_policies_info](modules/ome_alert_policies_info.rst)
-- [ome_alert_policies_actions_info](modules/ome_alert_policies_actions_info.rst)
-- [ome_alert_policies_category_info](modules/ome_alert_policies_category_info.rst)
-- [ome_application_alerts_smtp](modules/ome_application_alerts_smtp.rst)
-- [ome_application_alerts_syslog](modules/ome_application_alerts_syslog.rst)
-- [ome_application_certificate](modules/ome_application_certificate.rst)
-- [ome_application_console_preferences](modules/ome_application_console_preferences.rst)
-- [ome_application_network_address](modules/ome_application_network_address.rst)
-- [ome_application_network_proxy](modules/ome_application_network_proxy.rst)
-- [ome_application_network_settings](modules/ome_application_network_settings.rst)
-- [ome_application_network_time](modules/ome_application_network_time.rst)
-- [ome_application_network_webserver](modules/ome_application_network_webserver.rst)
-- [ome_application_security_settings](modules/ome_application_security_settings.rst)
-- [ome_chassis_slots](modules/ome_chassis_slots.rst)
-- [ome_configuration_compliance_baseline](modules/ome_configuration_compliance_baseline.rst)
-- [ome_configuration_compliance_info](modules/ome_configuration_compliance_info.rst)
-- [ome_device_group](modules/ome_device_group.rst)
-- [ome_device_info](modules/ome_device_info.rst)
-- [ome_device_local_access_configuration](modules/ome_device_local_access_configuration.rst)
-- [ome_device_location](modules/ome_device_location.rst)
-- [ome_device_mgmt_network](modules/ome_device_mgmt_network.rst)
-- [ome_device_network_services](modules/ome_device_network_services.rst)
-- [ome_device_power_settings](modules/ome_device_power_settings.rst)
-- [ome_device_quick_deploy](modules/ome_device_quick_deploy.rst)
-- [ome_devices](modules/ome_devices.rst)
-- [ome_diagnostics](modules/ome_diagnostics.rst)
-- [ome_discovery](modules/ome_discovery.rst)
-- [ome_domain_user_groups](modules/ome_domain_user_groups.rst)
-- [ome_firmware](modules/ome_firmware.rst)
-- [ome_firmware_baseline](modules/ome_firmware_baseline.rst)
-- [ome_firmware_baseline_compliance_info](modules/ome_firmware_baseline_compliance_info.rst)
-- [ome_firmware_baseline_info](modules/ome_firmware_baseline_info.rst)
-- [ome_firmware_catalog](modules/ome_firmware_catalog.rst)
-- [ome_groups](modules/ome_groups.rst)
-- [ome_identity_pool](modules/ome_identity_pool.rst)
-- [ome_job_info](modules/ome_job_info.rst)
-- [ome_network_port_breakout](modules/ome_network_port_breakout.rst)
-- [ome_network_vlan](modules/ome_network_vlan.rst)
-- [ome_network_vlan_info](modules/ome_network_vlan_info.rst)
-- [ome_powerstate](modules/ome_powerstate.rst)
-- [ome_profile](modules/ome_profile.rst)
-- [ome_profile_info](modules/ome_profile_info.rst)
-- [ome_server_interface_profile_info](modules/ome_server_interface_profile_info.rst)
-- [ome_server_interface_profiles](modules/ome_server_interface_profiles.rst)
-- [ome_smart_fabric_info](modules/ome_smart_fabric_info.rst)
-- [ome_smart_fabric](modules/ome_smart_fabric.rst)
-- [ome_smart_fabric_uplink_info](modules/ome_smart_fabric_uplink_info.rst)
-- [ome_smart_fabric_uplink](modules/ome_smart_fabric_uplink.rst)
-- [ome_template](modules/ome_template.rst)
-- [ome_template_identity_pool](modules/ome_template_identity_pool.rst)
-- [ome_template_info](modules/ome_template_info.rst)
-- [ome_template_network_vlan](modules/ome_template_network_vlan.rst)
-- [ome_template_network_vlan_info](modules/ome_template_network_vlan_info.rst)
-- [ome_user](modules/ome_user.rst)
-- [ome_user_info](modules/ome_user_info.rst)
+
+|  Module Name |
+| ------------------------------------------------------------------------------------------ |
+| [ome_active_directory](modules/ome_active_directory.rst) |
+| [ome_alert_policies](modules/ome_alert_policies.rst) |
+| [ome_alert_policies_message_id_info](modules/ome_alert_policies_message_id_info.rst) |
+| [ome_alert_policies_info](modules/ome_alert_policies_info.rst) |
+| [ome_alert_policies_actions_info](modules/ome_alert_policies_actions_info.rst) |
+| [ome_alert_policies_category_info](modules/ome_alert_policies_category_info.rst) |
+| [ome_application_alerts_smtp](modules/ome_application_alerts_smtp.rst) |
+| [ome_application_alerts_syslog](modules/ome_application_alerts_syslog.rst) |
+| [ome_application_certificate](modules/ome_application_certificate.rst) |
+| [ome_application_console_preferences](modules/ome_application_console_preferences.rst) |
+| [ome_application_network_address](modules/ome_application_network_address.rst) |
+| [ome_application_network_proxy](modules/ome_application_network_proxy.rst) |
+| [ome_application_network_settings](modules/ome_application_network_settings.rst) |
+| [ome_application_network_time](modules/ome_application_network_time.rst) |
+| [ome_application_network_webserver](modules/ome_application_network_webserver.rst) |
+| [ome_application_security_settings](modules/ome_application_security_settings.rst) |
+| [ome_chassis_slots](modules/ome_chassis_slots.rst) |
+| [ome_configuration_compliance_baseline](modules/ome_configuration_compliance_baseline.rst) |
+| [ome_configuration_compliance_info](modules/ome_configuration_compliance_info.rst) |
+| [ome_device_group](modules/ome_device_group.rst) |
+| [ome_device_info](modules/ome_device_info.rst) |
+| [ome_device_local_access_configuration](modules/ome_device_local_access_configuration.rst) |
+| [ome_device_location](modules/ome_device_location.rst) |
+| [ome_device_mgmt_network](modules/ome_device_mgmt_network.rst) |
+| [ome_device_network_services](modules/ome_device_network_services.rst) |
+| [ome_device_power_settings](modules/ome_device_power_settings.rst) |
+| [ome_device_quick_deploy](modules/ome_device_quick_deploy.rst) |
+| [ome_devices](modules/ome_devices.rst) |
+| [ome_diagnostics](modules/ome_diagnostics.rst) |
+| [ome_discovery](modules/ome_discovery.rst) |
+| [ome_domain_user_groups](modules/ome_domain_user_groups.rst) |
+| [ome_firmware](modules/ome_firmware.rst) |
+| [ome_firmware_baseline](modules/ome_firmware_baseline.rst) |
+| [ome_firmware_baseline_compliance_info](modules/ome_firmware_baseline_compliance_info.rst) |
+| [ome_firmware_baseline_info](modules/ome_firmware_baseline_info.rst) |
+| [ome_firmware_catalog](modules/ome_firmware_catalog.rst) |
+| [ome_groups](modules/ome_groups.rst) |
+| [ome_identity_pool](modules/ome_identity_pool.rst) |
+| [ome_job_info](modules/ome_job_info.rst) |
+| [ome_network_port_breakout](modules/ome_network_port_breakout.rst) |
+| [ome_network_vlan](modules/ome_network_vlan.rst) |
+| [ome_network_vlan_info](modules/ome_network_vlan_info.rst) |
+| [ome_powerstate](modules/ome_powerstate.rst) |
+| [ome_profile](modules/ome_profile.rst) |
+| [ome_profile_info](modules/ome_profile_info.rst) |
+| [ome_server_interface_profile_info](modules/ome_server_interface_profile_info.rst) |
+| [ome_server_interface_profiles](modules/ome_server_interface_profiles.rst) |
+| [ome_smart_fabric_info](modules/ome_smart_fabric_info.rst) |
+| [ome_smart_fabric](modules/ome_smart_fabric.rst) |
+| [ome_smart_fabric_uplink_info](modules/ome_smart_fabric_uplink_info.rst) |
+| [ome_smart_fabric_uplink](modules/ome_smart_fabric_uplink.rst) |
+| [ome_template](modules/ome_template.rst) |
+| [ome_template_identity_pool](modules/ome_template_identity_pool.rst) |
+| [ome_template_info](modules/ome_template_info.rst) |
+| [ome_template_network_vlan](modules/ome_template_network_vlan.rst) |
+| [ome_template_network_vlan_info](modules/ome_template_network_vlan_info.rst) |
+| [ome_user](modules/ome_user.rst) |
+| [ome_user_info](modules/ome_user_info.rst) | \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_diagnostics.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_diagnostics.rst
new file mode 100644
index 000000000..1a29769e0
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_diagnostics.rst
@@ -0,0 +1,390 @@
+.. _idrac_diagnostics_module:
+
+
+idrac_diagnostics -- Run and Export iDRAC diagnostics
+=====================================================
+
+.. contents::
+ :local:
+ :depth: 1
+
+
+Synopsis
+--------
+
+This module allows you to run and export diagnostics on iDRAC.
+
+
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- python \>= 3.9.6
+
+
+
+Parameters
+----------
+
+ run (optional, bool, None)
+ Run the diagnostics job on iDRAC.
+
+ Run the diagnostics job based on the \ :emphasis:`run\_mode`\ and save the report in the internal storage. \ :emphasis:`reboot\_type`\ is applicable.
+
+
+ export (optional, bool, None)
+ Exports the diagnostics information to the given share.
+
+ This operation requires \ :emphasis:`share\_parameters`\ .
+
+ When \ :emphasis:`run`\ is \ :literal:`true`\ and \ :emphasis:`job\_wait`\ is \ :literal:`false`\ , only then the run diagnostics job is triggered. \ :emphasis:`export`\ is ignored.
+
+
+ run_mode (optional, str, express)
+ This option provides the choices to run the diagnostics.
+
+ \ :literal:`express`\ The express diagnostics runs a test package for each server subsystem. However, it does not run the complete set of tests available in the package for each subsystem.
+
+ \ :literal:`extended`\ The extended diagnostics run all available tests in each test package for all subsystems.
+
+ \ :literal:`long\_run`\ The long-run diagnostics runs express and extended tests.
+
+
+ reboot_type (optional, str, graceful)
+ This option provides the choice to reboot the host immediately to run the diagnostics.
+
+ This is applicable when \ :emphasis:`run`\ is \ :literal:`true`\ .
+
+ \ :literal:`force`\ Forced graceful shutdown signals the operating system to turn off and wait for ten minutes. If the operating system does not turn off, the iDRAC power cycles the system.
+
+ \ :literal:`graceful`\ Graceful shutdown waits for the operating system to turn off and wait for the system to restart.
+
+ \ :literal:`power\_cycle`\ performs a power cycle for a hard reset on the device.
+
+
+ scheduled_start_time (optional, str, None)
+ Schedules the job at the specified time.
+
+ The accepted formats are yyyymmddhhmmss and YYYY-MM-DDThh:mm:ss+HH:MM.
+
+ This is applicable when \ :emphasis:`run`\ is \ :literal:`true`\ and \ :emphasis:`reboot\_type`\ is power\_cycle.
+
+
+ scheduled_end_time (optional, str, None)
+ Run the diagnostic until the specified end date and end time after the \ :emphasis:`scheduled\_start\_time`\ .
+
+ The accepted formats are yyyymmddhhmmss and YYYY-MM-DDThh:mm:ss+HH:MM.
+
+ If the run operation does not complete before the specified end time, then the operation fails.
+
+ This is applicable when \ :emphasis:`run`\ is \ :literal:`True`\ and \ :emphasis:`reboot\_type`\ is \ :literal:`power\_cycle`\ .
+
+
+ job_wait (optional, bool, True)
+ Provides the option to wait for job completion.
+
+ This is applicable when \ :emphasis:`run`\ is \ :literal:`true`\ and \ :emphasis:`reboot\_type`\ is \ :literal:`power\_cycle`\ .
+
+ This is applicable only to run the diagnostics job.
+
+
+ job_wait_timeout (optional, int, 1200)
+ Time in seconds to wait for job completion.
+
+ This is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
+
+
+ share_parameters (optional, dict, None)
+ Parameters that are required for the export operation of diagnostics.
+
+ \ :emphasis:`share\_parameters`\ is required when \ :emphasis:`export`\ is \ :literal:`true`\ .
+
+
+ share_type (optional, str, local)
+ Share type of the network share.
+
+ \ :literal:`local`\ uses local path for \ :emphasis:`export`\ operation.
+
+ \ :literal:`nfs`\ uses NFS share for \ :emphasis:`export`\ operation.
+
+ \ :literal:`cifs`\ uses CIFS share for \ :emphasis:`export`\ operation.
+
+ \ :literal:`http`\ uses HTTP share for \ :emphasis:`export`\ operation.
+
+ \ :literal:`https`\ uses HTTPS share for \ :emphasis:`export`\ operation.
+
+
+ file_name (optional, str, None)
+ Diagnostics file name for \ :emphasis:`export`\ operation.
+
+
+ ip_address (optional, str, None)
+ IP address of the network share.
+
+ \ :emphasis:`ip\_address`\ is required when \ :emphasis:`share\_type`\ is \ :literal:`nfs`\ , \ :literal:`cifs`\ , \ :literal:`http`\ or \ :literal:`https`\ .
+
+
+ share_name (optional, str, None)
+ Network share or local path of the diagnostics file.
+
+
+ workgroup (optional, str, None)
+ Workgroup of the network share.
+
+ \ :emphasis:`workgroup`\ is applicable only when \ :emphasis:`share\_type`\ is \ :literal:`cifs`\ .
+
+
+ username (optional, str, None)
+ Username of the network share.
+
+ \ :emphasis:`username`\ is required when \ :emphasis:`share\_type`\ is \ :literal:`cifs`\ .
+
+
+ password (optional, str, None)
+ Password of the network share.
+
+ \ :emphasis:`password`\ is required when \ :emphasis:`share\_type`\ is \ :literal:`cifs`\ .
+
+
+ ignore_certificate_warning (optional, str, off)
+ Ignores the certificate warning while connecting to Share and is only applicable when \ :emphasis:`share\_type`\ is \ :literal:`https`\ .
+
+ \ :literal:`off`\ ignores the certificate warning.
+
+ \ :literal:`on`\ does not ignore the certificate warning.
+
+
+ proxy_support (optional, str, off)
+ Specifies if proxy support must be used or not.
+
+ \ :literal:`off`\ does not use proxy settings.
+
+ \ :literal:`default\_proxy`\ uses the default proxy settings.
+
+ \ :literal:`parameters\_proxy`\ uses the specified proxy settings. \ :emphasis:`proxy\_server`\ is required when \ :emphasis:`proxy\_support`\ is \ :literal:`parameters\_proxy`\ .
+
+ \ :emphasis:`proxy\_support`\ is only applicable when \ :emphasis:`share\_type`\ is \ :literal:`http`\ or \ :literal:`https`\ .
+
+
+ proxy_type (optional, str, http)
+ The proxy type of the proxy server.
+
+ \ :literal:`http`\ to select HTTP proxy.
+
+ \ :literal:`socks`\ to select SOCKS proxy.
+
+ \ :emphasis:`proxy\_type`\ is only applicable when \ :emphasis:`share\_type`\ is \ :literal:`http`\ or \ :literal:`https`\ and when \ :emphasis:`proxy\_support`\ is \ :literal:`parameters\_proxy`\ .
+
+
+ proxy_server (optional, str, None)
+ The IP address of the proxy server.
+
+ \ :emphasis:`proxy\_server`\ is required when \ :emphasis:`proxy\_support`\ is \ :literal:`parameters\_proxy`\ .
+
+ \ :emphasis:`proxy\_server`\ is only applicable when \ :emphasis:`share\_type`\ is \ :literal:`http`\ or \ :literal:`https`\ and when \ :emphasis:`proxy\_support`\ is \ :literal:`parameters\_proxy`\ .
+
+
+ proxy_port (optional, int, 80)
+ The port of the proxy server.
+
+ \ :emphasis:`proxy\_port`\ is only applicable when \ :emphasis:`share\_type`\ is \ :literal:`http`\ or \ :literal:`https`\ and when \ :emphasis:`proxy\_support`\ is \ :literal:`parameters\_proxy`\ .
+
+
+ proxy_username (optional, str, None)
+ The username of the proxy server.
+
+ \ :emphasis:`proxy\_username`\ is only applicable when \ :emphasis:`share\_type`\ is \ :literal:`http`\ or \ :literal:`https`\ and when \ :emphasis:`proxy\_support`\ is \ :literal:`parameters\_proxy`\ .
+
+
+ proxy_password (optional, str, None)
+ The password of the proxy server.
+
+ \ :emphasis:`proxy\_password`\ is only applicable when \ :emphasis:`share\_type`\ is \ :literal:`http`\ or \ :literal:`https`\ and when \ :emphasis:`proxy\_support`\ is \ :literal:`parameters\_proxy`\ .
+
+
+
+ resource_id (optional, str, None)
+ Id of the resource.
+
+ If the value for resource ID is not provided, the module picks the first resource ID available from the list of system resources returned by the iDRAC.
+
+
+ idrac_ip (True, str, None)
+ iDRAC IP Address.
+
+
+ idrac_user (True, str, None)
+ iDRAC username.
+
+
+ idrac_password (True, str, None)
+ iDRAC user password.
+
+
+ idrac_port (optional, int, 443)
+ iDRAC port.
+
+
+ validate_certs (optional, bool, True)
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
+
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
+
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
+
+
+ ca_path (optional, path, None)
+ The Privacy Enhanced Mail (PEM) file that contains a CA certificate to be used for the validation.
+
+
+ timeout (optional, int, 30)
+ The socket level timeout in seconds.
+
+
+
+
+
+Notes
+-----
+
+.. note::
+ - Run this module from a system that has direct access to Dell iDRAC.
+ - This module supports only iDRAC9 and above.
+ - This module supports IPv4 and IPv6 addresses.
+ - This module supports \ :literal:`check\_mode`\ .
+ - This module requires 'Dell Diagnostics' firmware package to be present on the server.
+ - When \ :emphasis:`share\_type`\ is \ :literal:`local`\ for \ :emphasis:`export`\ operation, job\_details are not displayed.
+
+
+
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ ---
+ - name: Run and export the diagnostics to local path
+ dellemc.openmanage.idrac_diagnostics:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "path/to/ca_file"
+ run: true
+ export: true
+ share_parameters:
+ share_type: "local"
+ share_path: "/opt/local/diagnostics/"
+ file_name: "diagnostics.txt"
+
+ - name: Run the diagnostics with power cycle reboot on schedule
+ dellemc.openmanage.idrac_diagnostics:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "path/to/ca_file"
+ run: true
+ run_mode: "express"
+ reboot_type: "power_cycle"
+ scheduled_start_time: 20240101101015
+
+ - name: Run and export the diagnostics to HTTPS share
+ dellemc.openmanage.idrac_diagnostics:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "path/to/ca_file"
+ run: true
+ export: true
+ share_parameters:
+ share_type: "HTTPS"
+ ignore_certificate_warning: "on"
+ share_name: "/share_path/diagnostics_collection_path"
+ ip_address: "192.168.0.2"
+ file_name: "diagnostics.txt"
+
+ - name: Run and export the diagnostics to NFS share
+ dellemc.openmanage.idrac_diagnostics:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "path/to/ca_file"
+ run: true
+ export: true
+ share_parameters:
+ share_type: "NFS"
+ share_name: "nfsshare/diagnostics_collection_path/"
+ ip_address: "192.168.0.3"
+ file_name: "diagnostics.txt"
+
+ - name: Export the diagnostics to CIFS share
+ dellemc.openmanage.idrac_diagnostics:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "path/to/ca_file"
+ export: true
+ share_parameters:
+ share_type: "CIFS"
+ share_name: "/cifsshare/diagnostics_collection_path/"
+ ip_address: "192.168.0.4"
+ file_name: "diagnostics.txt"
+
+ - name: Export the diagnostics to HTTPS share via proxy
+ dellemc.openmanage.idrac_diagnostics:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "path/to/ca_file"
+ export: true
+ share_parameters:
+ share_type: "HTTPS"
+ share_name: "/share_path/diagnostics_collection_path"
+ ignore_certificate_warning: "on"
+ ip_address: "192.168.0.2"
+ file_name: "diagnostics.txt"
+ proxy_support: parameters_proxy
+ proxy_type: http
+ proxy_server: "192.168.0.5"
+ proxy_port: 1080
+ proxy_username: "proxy_user"
+ proxy_password: "proxy_password"
+
+
+
+Return Values
+-------------
+
+msg (always, str, Successfully ran and exported the diagnostics.)
+ Status of the diagnostics operation.
+
+
+job_details (For run and export operations, dict, {'ActualRunningStartTime': '2024-01-10T10:14:31', 'ActualRunningStopTime': '2024-01-10T10:26:34', 'CompletionTime': '2024-01-10T10:26:34', 'Description': 'Job Instance', 'EndTime': '2024-01-10T10:30:15', 'Id': 'JID_XXXXXXXXXXXX', 'JobState': 'Completed', 'JobType': 'RemoteDiagnostics', 'Message': 'Job completed successfully.', 'MessageArgs': [], 'MessageArgs@odata.count': 0, 'MessageId': 'SYS018', 'Name': 'Remote Diagnostics', 'PercentComplete': 100, 'StartTime': '2024-01-10T10:12:15', 'TargetSettingsURI': None})
+ Returns the output for status of the job.
+
+
+diagnostics_file_path (For export operation, str, /share_path/diagnostics_collection_path/diagnostics.txt)
+ Returns the full path of the diagnostics file.
+
+
+error_info (on HTTP error, dict, {'error': {'code': 'Base.1.12.GeneralError', 'message': 'A general error has occurred. See ExtendedInfo for more information.', '@Message.ExtendedInfo': [{'Message': 'A Remote Diagnostic (ePSA) job already exists.', 'MessageArgs': [], 'MessageArgs@odata.count': 0, 'MessageId': 'IDRAC.2.9.SYS098', 'RelatedProperties': [], 'RelatedProperties@odata.count': 0, 'Resolution': 'A response action is not required if the scheduled start time of the existing Remote Diagnostic (ePSA) job is ok. Else, delete the existing Diagnostics (ePSA) job and recreate another with an appropriate start time.', 'Severity': 'Informational'}]}})
+ Details of the HTTP Error.
+
+
+
+
+
+Status
+------
+
+
+
+
+
+Authors
+~~~~~~~
+
+- Shivam Sharma(@ShivamSh3)
+
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_reset.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_reset.rst
index e0efdc9f9..85ec3ca3c 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_reset.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_reset.rst
@@ -1,8 +1,8 @@
.. _idrac_reset_module:
-idrac_reset -- Reset iDRAC
-==========================
+idrac_reset -- Factory reset the iDRACs
+=======================================
.. contents::
:local:
@@ -12,9 +12,7 @@ idrac_reset -- Reset iDRAC
Synopsis
--------
-This module resets iDRAC.
-
-iDRAC is not accessible for some time after running this module. It is recommended to wait for some time, before trying to connect to iDRAC.
+This module resets the iDRAC to factory default settings.
@@ -22,14 +20,57 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- omsdk >= 1.2.488
-- python >= 3.9.6
+- python \>= 3.9.6
Parameters
----------
+ reset_to_default (optional, str, None)
+ If this value is not set the default behaviour is to restart the iDRAC.
+
+ \ :literal:`All`\  Discards all settings and resets to default credentials.
+
+ \ :literal:`ResetAllWithRootDefaults`\  Discards all settings and resets the default username to root and password to the shipping value.
+
+ \ :literal:`Default`\ Discards all settings, but preserves user and network settings.
+
+ \ :literal:`CustomDefaults`\  All configuration is set to custom defaults. This option is supported on firmware version 7.00.00.00 and newer versions.
+
+
+ custom_defaults_file (optional, str, None)
+ Name of the custom default configuration file in the XML format.
+
+ This option is applicable when \ :emphasis:`reset\_to\_default`\ is \ :literal:`CustomDefaults`\ .
+
+ \ :emphasis:`custom\_defaults\_file`\ is mutually exclusive with \ :emphasis:`custom\_defaults\_buffer`\ .
+
+
+ custom_defaults_buffer (optional, str, None)
+ This parameter provides the option to import the buffer input in XML format as a custom default configuration.
+
+ This option is applicable when \ :emphasis:`reset\_to\_default`\ is \ :literal:`CustomDefaults`\ .
+
+ \ :emphasis:`custom\_defaults\_buffer`\ is mutually exclusive with \ :emphasis:`custom\_defaults\_file`\ .
+
+
+ wait_for_idrac (optional, bool, True)
+ This parameter provides the option to wait for the iDRAC to reset and lifecycle controller status to be ready.
+
+
+ job_wait_timeout (optional, int, 600)
+ Time in seconds to wait for job completion.
+
+ This is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
+
+
+ force_reset (optional, bool, False)
+ This parameter provides the option to force reset the iDRAC without checking the iDRAC lifecycle controller status.
+
+ This option is applicable only for iDRAC9.
+
+
idrac_ip (True, str, None)
iDRAC IP Address.
@@ -37,21 +78,29 @@ Parameters
idrac_user (True, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :literal:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
idrac_password (True, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :literal:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -70,8 +119,10 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports both IPv4 and IPv6 address for *idrac_ip*.
- - This module supports ``check_mode``.
+ - This module supports both IPv4 and IPv6 address for \ :emphasis:`idrac\_ip`\ .
+ - This module supports \ :literal:`check\_mode`\ .
+ - If reset\_to\_default option is not specified, then this module triggers a graceful restart.
+ - This module skips the execution if reset options are not supported by the iDRAC.
@@ -83,13 +134,57 @@ Examples
---
- - name: Reset iDRAC
+ - name: Reset the iDRAC to all and wait till the iDRAC is accessible.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "All"
+
+ - name: Reset the iDRAC to default and do not wait till the iDRAC is accessible.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "Default"
+ wait_for_idrac: false
+
+ - name: Force reset the iDRAC to default.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "Default"
+ force_reset: true
+
+ - name: Gracefully restart the iDRAC.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+
+ - name: Reset the iDRAC to custom defaults XML and do not wait till the iDRAC is accessible.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "CustomDefaults"
+ custom_defaults_file: "/path/to/custom_defaults.xml"
+
+ - name: Reset the iDRAC to custom defaults buffer input and do not wait till the iDRAC is accessible.
dellemc.openmanage.idrac_reset:
- idrac_ip: "192.168.0.1"
- idrac_user: "user_name"
- idrac_password: "user_password"
- idrac_port: 443
- ca_path: "/path/to/ca_cert.pem"
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "CustomDefaults"
+ custom_defaults_buffer: "<SystemConfiguration Model=\"PowerEdge R7525\" ServiceTag=\"ABCD123\">\n<Component FQDD=\"iDRAC.Embedded.1\">\n
+ <Attribute Name=\"IPMILan.1#Enable\">Disabled</Attribute>\n </Component>\n\n</SystemConfiguration>"
@@ -100,7 +195,7 @@ msg (always, str, Successfully performed iDRAC reset.)
Status of the iDRAC reset operation.
-reset_status (always, dict, {'idracreset': {'Data': {'StatusCode': 204}, 'Message': 'none', 'Status': 'Success', 'StatusCode': 204, 'retval': True}})
+reset_status (reset operation is triggered., dict, {'idracreset': {'Data': {'StatusCode': 204}, 'Message': 'none', 'Status': 'Success', 'StatusCode': 204, 'retval': True}})
Details of iDRAC reset operation.
@@ -123,4 +218,5 @@ Authors
- Felix Stephen (@felixs88)
- Anooja Vardhineni (@anooja-vardhineni)
+- Lovepreet Singh (@singh-lovepreet1)
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_session.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_session.rst
new file mode 100644
index 000000000..d8f980043
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_session.rst
@@ -0,0 +1,157 @@
+.. _idrac_session_module:
+
+
+idrac_session -- Manage iDRAC sessions
+======================================
+
+.. contents::
+ :local:
+ :depth: 1
+
+
+Synopsis
+--------
+
+This module allows the creation and deletion of sessions on iDRAC.
+
+
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- python \>= 3.9.6
+
+
+
+Parameters
+----------
+
+ hostname (optional, str, None)
+ IP address or hostname of the iDRAC.
+
+
+ username (optional, str, None)
+ Username of the iDRAC.
+
+ \ :emphasis:`username`\ is required when \ :emphasis:`state`\ is \ :literal:`present`\ .
+
+
+ password (optional, str, None)
+ Password of the iDRAC.
+
+ \ :emphasis:`password`\ is required when \ :emphasis:`state`\ is \ :literal:`present`\ .
+
+
+ port (optional, int, 443)
+ Port of the iDRAC.
+
+
+ validate_certs (optional, bool, True)
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
+
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
+
+
+ ca_path (optional, path, None)
+ The Privacy Enhanced Mail (PEM) file that contains a CA certificate to be used for the validation.
+
+
+ timeout (optional, int, 30)
+ The https socket level timeout in seconds.
+
+
+ state (optional, str, present)
+ The state of the session in an iDRAC.
+
+ \ :literal:`present`\ creates a session.
+
+ \ :literal:`absent`\ deletes a session.
+
+ Module will always report changes found to be applied when \ :emphasis:`state`\ is \ :literal:`present`\ .
+
+
+ auth_token (optional, str, None)
+ Authentication token.
+
+ \ :emphasis:`auth\_token`\ is required when \ :emphasis:`state`\ is \ :literal:`absent`\ .
+
+
+ session_id (optional, int, None)
+ Session ID of the iDRAC.
+
+ \ :emphasis:`session\_id`\ is required when \ :emphasis:`state`\ is \ :literal:`absent`\ .
+
+
+
+
+
+Notes
+-----
+
+.. note::
+ - Run this module from a system that has direct access to Dell iDRAC.
+ - This module supports IPv4 and IPv6 addresses.
+ - This module supports \ :literal:`check\_mode`\ .
+ - This module will always report changes found to be applied when \ :emphasis:`state`\ is \ :literal:`present`\ .
+
+
+
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ ---
+ - name: Create a session
+ dellemc.openmanage.idrac_session:
+ hostname: 198.162.0.1
+ username: username
+ password: password
+ state: present
+
+ - name: Delete a session
+ dellemc.openmanage.idrac_session:
+ hostname: 198.162.0.1
+ state: absent
+ auth_token: aed4aa802b748d2f3b31deec00a6b28a
+ session_id: 2
+
+
+
+Return Values
+-------------
+
+msg (always, str, The session has been created successfully.)
+ Status of the session operation.
+
+
+session_data (For session creation operation, dict, {'@Message.ExtendedInfo': [{'Message': 'The resource has been created successfully.', 'MessageArgs': [], 'MessageId': 'Base.1.12.Created', 'RelatedProperties': [], 'Resolution': 'None.', 'Severity': 'OK'}, {'Message': 'A new resource is successfully created.', 'MessageArgs': [], 'MessageId': 'IDRAC.2.9.SYS414', 'RelatedProperties': [], 'Resolution': 'No response action is required.', 'Severity': 'Informational'}], 'ClientOriginIPAddress': '100.96.37.58', 'CreatedTime': '2024-04-05T01:14:01-05:00', 'Description': 'User Session', 'Id': '74', 'Name': 'User Session', 'Password': None, 'SessionType': 'Redfish', 'UserName': 'root'})
+ The session details.
+
+
+x_auth_token (For session creation operation, str, d15f17f01cd627c30173b1582642497d)
+ Authentication token.
+
+
+error_info (On HTTP error, dict, {'error': {'@Message.ExtendedInfo': [{'Message': 'Unable to complete the operation because an invalid username and/or password is entered, and therefore authentication failed.', 'MessageArgs': [], 'MessageId': 'IDRAC.2.9.SYS415', 'RelatedProperties': [], 'Resolution': 'Enter valid user name and password and retry the operation.', 'Severity': 'Warning'}], 'code': 'Base.1.12.GeneralError', 'message': 'A general error has occurred. See ExtendedInfo for more information'}})
+ Details of the HTTP Error.
+
+
+
+
+
+Status
+------
+
+
+
+
+
+Authors
+~~~~~~~
+
+- Rajshekar P(@rajshekarp87)
+
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_storage_volume.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_storage_volume.rst
new file mode 100644
index 000000000..5cfefc036
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_storage_volume.rst
@@ -0,0 +1,310 @@
+.. _idrac_storage_volume_module:
+
+
+idrac_storage_volume -- Configures the RAID configuration attributes
+====================================================================
+
+.. contents::
+ :local:
+ :depth: 1
+
+
+Synopsis
+--------
+
+This module is responsible for configuring the RAID attributes.
+
+
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- python \>= 3.9.6
+
+
+
+Parameters
+----------
+
+ state (optional, str, view)
+ \ :literal:`create`\ , performs create volume operation.
+
+ \ :literal:`delete`\ , performs remove volume operation.
+
+ \ :literal:`view`\ , returns storage view.
+
+
+ span_depth (optional, int, 1)
+ Number of spans in the RAID configuration.
+
+ \ :emphasis:`span\_depth`\ is required for \ :literal:`create`\ and its value depends on \ :emphasis:`volume\_type`\ .
+
+
+ span_length (optional, int, 1)
+ Number of disks in a span.
+
+ \ :emphasis:`span\_length`\ is required for \ :literal:`create`\ and its value depends on \ :emphasis:`volume\_type`\ .
+
+
+ number_dedicated_hot_spare (optional, int, 0)
+ Number of Dedicated Hot Spare.
+
+
+ volume_type (optional, str, RAID 0)
+ Provide the required RAID level.
+
+
+ disk_cache_policy (optional, str, Default)
+ Disk Cache Policy.
+
+
+ write_cache_policy (optional, str, WriteThrough)
+ Write cache policy.
+
+
+ read_cache_policy (optional, str, NoReadAhead)
+ Read cache policy.
+
+
+ stripe_size (optional, int, 65536)
+ Stripe size value to be provided in multiples of 64 \* 1024.
+
+
+ controller_id (optional, str, None)
+ Fully Qualified Device Descriptor (FQDD) of the storage controller, for example 'RAID.Integrated.1-1'. Controller FQDD is required for \ :literal:`create`\ RAID configuration.
+
+
+ media_type (optional, str, None)
+ Media type.
+
+
+ protocol (optional, str, None)
+ Bus protocol.
+
+
+ volume_id (optional, str, None)
+ Fully Qualified Device Descriptor (FQDD) of the virtual disk, for example 'Disk.virtual.0:RAID.Slot.1-1'. This option is used to get the virtual disk information.
+
+
+ volumes (optional, list, None)
+ A list of virtual disk specific iDRAC attributes. This is applicable for \ :literal:`create`\ and \ :literal:`delete`\ operations.
+
+ For \ :literal:`create`\ operation, name and drives are applicable options, other volume options can also be specified.
+
+ The drives option is required for \ :literal:`create`\  operation and accepts either location (list of drive slot) or id (list of drive fqdd).
+
+ In iDRAC8, there is no pre-validation for the state of drives. The disk ID or slot number of the drive provided may or may not be in Ready state. Enter the disk ID or slot number of the drive that is already in Ready state.
+
+ For \ :literal:`delete`\ operation, only name option is applicable.
+
+ See the examples for more details.
+
+
+ capacity (optional, float, None)
+ Virtual disk size in GB.
+
+
+ raid_reset_config (optional, str, false)
+ This option represents whether a reset config operation needs to be performed on the RAID controller. Reset Config operation deletes all the virtual disks present on the RAID controller.
+
+
+ raid_init_operation (optional, str, None)
+ This option represents initialization configuration operation to be performed on the virtual disk.
+
+
+ job_wait (optional, bool, True)
+ This parameter provides the option to wait for the job completion.
+
+ This is applicable when \ :emphasis:`state`\ is \ :literal:`create`\ or \ :literal:`delete`\ .
+
+
+ job_wait_timeout (optional, int, 900)
+ This parameter is the maximum wait time of \ :emphasis:`job\_wait`\ in seconds.
+
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
+
+
+ idrac_ip (True, str, None)
+ iDRAC IP Address.
+
+
+ idrac_user (True, str, None)
+ iDRAC username.
+
+ If the username is not provided, then the environment variable \ :literal:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
+
+ idrac_password (True, str, None)
+ iDRAC user password.
+
+ If the password is not provided, then the environment variable \ :literal:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ idrac_port (optional, int, 443)
+ iDRAC port.
+
+
+ validate_certs (optional, bool, True)
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
+
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
+
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
+
+
+ ca_path (optional, path, None)
+ The Privacy Enhanced Mail (PEM) file that contains a CA certificate to be used for the validation.
+
+
+ timeout (optional, int, 30)
+ The socket level timeout in seconds.
+
+
+
+
+
+Notes
+-----
+
+.. note::
+ - Run this module from a system that has direct access to Integrated Dell Remote Access Controller.
+ - This module supports both IPv4 and IPv6 address for \ :emphasis:`idrac\_ip`\ .
+ - This module supports \ :literal:`check\_mode`\ .
+ - This module does not display the controller battery details for the \ :literal:`view`\ operation of the storage in iDRAC8.
+
+
+
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ ---
+ - name: Create single volume
+ dellemc.openmanage.idrac_storage_volume:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "username"
+ idrac_password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "create"
+ controller_id: "RAID.Slot.1-1"
+ volumes:
+ - drives:
+ location: [5]
+
+ - name: Create multiple volume
+ dellemc.openmanage.idrac_storage_volume:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "username"
+ idrac_password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ raid_reset_config: "True"
+ state: "create"
+ controller_id: "RAID.Slot.1-1"
+ volume_type: "RAID 1"
+ span_depth: 1
+ span_length: 2
+ number_dedicated_hot_spare: 1
+ disk_cache_policy: "Enabled"
+ write_cache_policy: "WriteBackForce"
+ read_cache_policy: "ReadAhead"
+ stripe_size: 65536
+ capacity: 100
+ raid_init_operation: "Fast"
+ volumes:
+ - name: "volume_1"
+ drives:
+ id: ["Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1", "Disk.Bay.2:Enclosure.Internal.0-1:RAID.Slot.1-1"]
+ - name: "volume_2"
+ volume_type: "RAID 5"
+ span_length: 3
+ span_depth: 1
+ drives:
+ location: [7, 3, 5]
+ disk_cache_policy: "Disabled"
+ write_cache_policy: "WriteBack"
+ read_cache_policy: "NoReadAhead"
+ stripe_size: 131072
+ capacity: "200"
+ raid_init_operation: "None"
+
+ - name: View all volume details
+ dellemc.openmanage.idrac_storage_volume:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "username"
+ idrac_password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "view"
+
+ - name: View specific volume details
+ dellemc.openmanage.idrac_storage_volume:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "username"
+ idrac_password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "view"
+ controller_id: "RAID.Slot.1-1"
+ volume_id: "Disk.Virtual.0:RAID.Slot.1-1"
+
+ - name: Delete single volume
+ dellemc.openmanage.idrac_storage_volume:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "username"
+ idrac_password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "delete"
+ volumes:
+ - name: "volume_1"
+
+ - name: Delete multiple volume
+ dellemc.openmanage.idrac_storage_volume:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "username"
+ idrac_password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "delete"
+ volumes:
+ - name: "volume_1"
+ - name: "volume_2"
+
+
+
+Return Values
+-------------
+
+msg (always, str, Successfully completed the view storage volume operation)
+ Overall status of the storage configuration operation.
+
+
+storage_status (success, dict, {'Id': 'JID_XXXXXXXXX', 'JobState': 'Completed', 'JobType': 'ImportConfiguration', 'Message': 'Successfully imported and applied Server Configuration Profile.', 'MessageId': 'XXX123', 'Name': 'Import Configuration', 'PercentComplete': 100, 'StartTime': 'TIME_NOW', 'TargetSettingsURI': None})
+ Storage configuration job and progress details from the iDRAC.
+
+
+error_info (on HTTP error, dict, {'error': {'code': 'Base.1.0.GeneralError', 'message': 'A general error has occurred. See ExtendedInfo for more information.', '@Message.ExtendedInfo': [{'MessageId': 'GEN1234', 'RelatedProperties': [], 'Message': 'Unable to process the request because an error occurred.', 'MessageArgs': [], 'Severity': 'Critical', 'Resolution': 'Retry the operation. If the issue persists, contact your system administrator.'}]}})
+ Details of the HTTP Error.
+
+
+
+
+
+Status
+------
+
+
+
+
+
+Authors
+~~~~~~~
+
+- Felix Stephen (@felixs88)
+- Kritika Bhateja (@Kritika-Bhateja-03)
+- Abhishek Sinha(@ABHISHEK-SINHA10)
+
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_console_preferences.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_console_preferences.rst
index 79ce3f7d4..45292acd0 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_console_preferences.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_console_preferences.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -42,17 +42,17 @@ Parameters
health_check_interval_unit (optional, str, None)
The time unit of the frequency at which the device health must be recorded and data stored.
- ``Hourly`` to set the frequency in hours.
+ \ :literal:`Hourly`\ to set the frequency in hours.
- ``Minutes`` to set the frequency in minutes.
+ \ :literal:`Minutes`\ to set the frequency in minutes.
health_and_power_state_on_connection_lost (optional, str, None)
The latest recorded device health.
- ``last_known`` to display the latest recorded device health when the power connection was lost.
+ \ :literal:`last\_known`\ to display the latest recorded device health when the power connection was lost.
- ``unknown`` to display the latest recorded device health when the device status moved to unknown.
+ \ :literal:`unknown`\ to display the latest recorded device health when the device status moved to unknown.
@@ -63,17 +63,17 @@ Parameters
general_device_naming (optional, str, DNS)
Applicable to all the discovered devices other than the iDRACs.
- ``DNS`` to use the DNS name.
+ \ :literal:`DNS`\ to use the DNS name.
- ``NETBIOS`` to use the NetBIOS name.
+ \ :literal:`NETBIOS`\ to use the NetBIOS name.
server_device_naming (optional, str, IDRAC_SYSTEM_HOSTNAME)
Applicable to iDRACs only.
- ``IDRAC_HOSTNAME`` to use the iDRAC hostname.
+ \ :literal:`IDRAC\_HOSTNAME`\ to use the iDRAC hostname.
- ``IDRAC_SYSTEM_HOSTNAME`` to use the system hostname.
+ \ :literal:`IDRAC\_SYSTEM\_HOSTNAME`\ to use the system hostname.
invalid_device_hostname (optional, str, None)
@@ -92,9 +92,9 @@ Parameters
device_discovery_approval_policy (optional, str, None)
Discovery approval policies.
- ``Automatic`` allows servers with iDRAC Firmware version 4.00.00.00, which are on the same network as the console, to be discovered automatically by the console.
+ \ :literal:`Automatic`\ allows servers with iDRAC Firmware version 4.00.00.00, which are on the same network as the console, to be discovered automatically by the console.
- ``Manual`` for the servers to be discovered by the user manually.
+ \ :literal:`Manual`\ for the servers to be discovered by the user manually.
set_trap_destination (optional, bool, None)
@@ -105,9 +105,9 @@ Parameters
mx7000_onboarding_preferences (optional, str, None)
Alert-forwarding behavior on chassis when they are onboarded.
- ``all`` to receive all alert.
+ \ :literal:`all`\ to receive all alert.
- ``chassis`` to receive chassis category alerts only.
+ \ :literal:`chassis`\ to receive chassis category alerts only.
builtin_appliance_share (optional, dict, None)
@@ -117,19 +117,19 @@ Parameters
share_options (optional, str, None)
The share options.
- ``CIFS`` to select CIFS share type.
+ \ :literal:`CIFS`\ to select CIFS share type.
- ``HTTPS`` to select HTTPS share type.
+ \ :literal:`HTTPS`\ to select HTTPS share type.
cifs_options (optional, str, None)
The SMB protocol version.
- *cifs_options* is required *share_options* is ``CIFS``.
+ \ :emphasis:`cifs\_options`\  is required when \ :emphasis:`share\_options`\  is \ :literal:`CIFS`\ .
- ``V1`` to enable SMBv1.
+ \ :literal:`V1`\ to enable SMBv1.
- ``V2`` to enable SMBv2
+ \ :literal:`V2`\  to enable SMBv2.
@@ -140,9 +140,9 @@ Parameters
trap_forwarding_format (optional, str, None)
The trap forwarding format.
- ``Original`` to retain the trap data as is.
+ \ :literal:`Original`\ to retain the trap data as is.
- ``Normalized`` to normalize the trap data.
+ \ :literal:`Normalized`\ to normalize the trap data.
metrics_collection_settings (optional, int, None)
@@ -166,11 +166,11 @@ Parameters
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -188,7 +188,7 @@ Notes
-----
.. note::
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
@@ -311,4 +311,5 @@ Authors
- Sachin Apagundi(@sachin-apa)
- Husniya Hameed (@husniya-hameed)
+- ShivamSh3 (@ShivamSh3)
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_local_access_configuration.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_local_access_configuration.rst
index a5846243e..1deafefb0 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_local_access_configuration.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_local_access_configuration.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,17 +30,17 @@ Parameters
device_id (optional, int, None)
The ID of the chassis for which the local access configuration to be updated.
- If the device ID is not specified, this module updates the local access settings for the *hostname*.
+ If the device ID is not specified, this module updates the local access settings for the \ :emphasis:`hostname`\ .
- *device_id* is mutually exclusive with *device_service_tag*.
+ \ :emphasis:`device\_id`\ is mutually exclusive with \ :emphasis:`device\_service\_tag`\ .
device_service_tag (optional, str, None)
The service tag of the chassis for which the local access settings needs to be updated.
- If the device service tag is not specified, this module updates the local access settings for the *hostname*.
+ If the device service tag is not specified, this module updates the local access settings for the \ :emphasis:`hostname`\ .
- *device_service_tag* is mutually exclusive with *device_id*.
+ \ :emphasis:`device\_service\_tag`\ is mutually exclusive with \ :emphasis:`device\_id`\ .
enable_kvm_access (optional, bool, None)
@@ -58,21 +58,21 @@ Parameters
enable_chassis_power_button (True, bool, None)
Enables or disables the chassis power button.
- If ``false``, the chassis cannot be turn on or turn off using the power button.
+ If \ :literal:`false`\ , the chassis cannot be turned on or turned off using the power button.
enable_lcd_override_pin (optional, bool, None)
Enables or disables the LCD override pin.
- This is required when *enable_chassis_power_button* is ``false``.
+ This is required when \ :emphasis:`enable\_chassis\_power\_button`\ is \ :literal:`false`\ .
disabled_button_lcd_override_pin (optional, str, None)
The six digit LCD override pin to change the power state of the chassis.
- This is required when *enable_lcd_override_pin* is ``true``.
+ This is required when \ :emphasis:`enable\_lcd\_override\_pin`\ is \ :literal:`true`\ .
- The module will always report change when *disabled_button_lcd_override_pin* is ``true``.
+ The module will always report change when \ :emphasis:`disabled\_button\_lcd\_override\_pin`\ is \ :literal:`true`\ .
The value must be specified in quotes. ex: "001100".
@@ -81,17 +81,17 @@ Parameters
quick_sync (optional, dict, None)
The settings for quick sync.
- The *quick_sync* options are ignored if the quick sync hardware is not present.
+ The \ :emphasis:`quick\_sync`\ options are ignored if the quick sync hardware is not present.
quick_sync_access (optional, str, None)
- Users with administrator privileges can set the following types of *quick_sync_access*.
+ Users with administrator privileges can set the following types of \ :emphasis:`quick\_sync\_access`\ .
- ``READ_WRITE`` enables writing configuration using quick sync.
+ \ :literal:`READ\_WRITE`\ enables writing configuration using quick sync.
- ``READ_ONLY`` enables read only access to Wi-Fi and Bluetooth Low Energy(BLE).
+ \ :literal:`READ\_ONLY`\ enables read only access to Wi-Fi and Bluetooth Low Energy(BLE).
- ``DISABLED`` disables reading or writing configuration through quick sync.
+ \ :literal:`DISABLED`\ disables reading or writing configuration through quick sync.
enable_inactivity_timeout (optional, bool, None)
@@ -103,17 +103,17 @@ Parameters
The range is 120 to 3600 in seconds, or 2 to 60 in minutes.
- This option is required when *enable_inactivity_timeout* is ``true``.
+ This option is required when \ :emphasis:`enable\_inactivity\_timeout`\ is \ :literal:`true`\ .
timeout_limit_unit (optional, str, None)
Inactivity timeout limit unit.
- ``SECONDS`` to set *timeout_limit* in seconds.
+ \ :literal:`SECONDS`\ to set \ :emphasis:`timeout\_limit`\ in seconds.
- ``MINUTES`` to set *timeout_limit* in minutes.
+ \ :literal:`MINUTES`\ to set \ :emphasis:`timeout\_limit`\ in minutes.
- This option is required when *enable_inactivity_timeout* is ``true``.
+ This option is required when \ :emphasis:`enable\_inactivity\_timeout`\ is \ :literal:`true`\ .
enable_read_authentication (optional, bool, None)
@@ -128,17 +128,17 @@ Parameters
lcd (optional, dict, None)
The settings for LCD.
- The *lcd* options are ignored if the LCD hardware is not present in the chassis.
+ The \ :emphasis:`lcd`\ options are ignored if the LCD hardware is not present in the chassis.
lcd_access (optional, str, None)
Option to configure the quick sync settings using LCD.
- ``VIEW_AND_MODIFY`` to set access level to view and modify.
+ \ :literal:`VIEW\_AND\_MODIFY`\ to set access level to view and modify.
- ``VIEW_ONLY`` to set access level to view.
+ \ :literal:`VIEW\_ONLY`\ to set access level to view.
- ``DISABLED`` to disable the access.
+ \ :literal:`DISABLED`\ to disable the access.
user_defined (optional, str, None)
@@ -179,11 +179,11 @@ Parameters
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -202,8 +202,8 @@ Notes
.. note::
- Run this module from a system that has direct access to OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
- - The module will always report change when *enable_chassis_power_button* is ``true``.
+ - This module supports \ :literal:`check\_mode`\ .
+ - The module will always report change when \ :emphasis:`enable\_chassis\_power\_button`\ is \ :literal:`true`\ .
@@ -281,7 +281,7 @@ msg (always, str, Successfully updated the local access settings.)
Overall status of the device local access settings.
-location_details (success, dict, {'SettingType': 'LocalAccessConfiguration', 'EnableChassisDirect': False, 'EnableChassisPowerButton': False, 'EnableKvmAccess': True, 'EnableLcdOverridePin': False, 'LcdAccess': 'VIEW_ONLY', 'LcdCustomString': 'LCD Text', 'LcdLanguage': 'en', 'LcdOverridePin': '', 'LcdPinLength': None, 'LcdPresence': 'Present', 'LedPresence': None, 'QuickSync': {'EnableInactivityTimeout': True, 'EnableQuickSyncWifi': False, 'EnableReadAuthentication': False, 'QuickSyncAccess': 'READ_ONLY', 'QuickSyncHardware': 'Present', 'TimeoutLimit': 7, 'TimeoutLimitUnit': 'MINUTES'}})
+location_details (success, dict, {'SettingType': 'LocalAccessConfiguration', 'EnableChassisDirect': False, 'EnableChassisPowerButton': False, 'EnableKvmAccess': True, 'EnableLcdOverridePin': False, 'LcdAccess': 'VIEW_ONLY', 'LcdCustomString': 'LCD Text', 'LcdLanguage': 'en', 'LcdOverridePin': '', 'LcdPinLength': 6, 'LcdPresence': 'Present', 'LedPresence': 'Absent', 'QuickSync': {'EnableInactivityTimeout': True, 'EnableQuickSyncWifi': False, 'EnableReadAuthentication': False, 'QuickSyncAccess': 'READ_ONLY', 'QuickSyncHardware': 'Present', 'TimeoutLimit': 7, 'TimeoutLimitUnit': 'MINUTES'}})
returned when local access settings are updated successfully.
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_quick_deploy.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_quick_deploy.rst
index 185331335..6d1e1adf3 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_quick_deploy.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_quick_deploy.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -30,25 +30,25 @@ Parameters
device_id (optional, int, None)
The ID of the chassis for which the Quick Deploy settings to be deployed.
- If the device ID is not specified, this module updates the Quick Deploy settings for the *hostname*.
+ If the device ID is not specified, this module updates the Quick Deploy settings for the \ :emphasis:`hostname`\ .
- *device_id* is mutually exclusive with *device_service_tag*.
+ \ :emphasis:`device\_id`\ is mutually exclusive with \ :emphasis:`device\_service\_tag`\ .
device_service_tag (optional, str, None)
The service tag of the chassis for which the Quick Deploy settings to be deployed.
- If the device service tag is not specified, this module updates the Quick Deploy settings for the *hostname*.
+ If the device service tag is not specified, this module updates the Quick Deploy settings for the \ :emphasis:`hostname`\ .
- *device_service_tag* is mutually exclusive with *device_id*.
+ \ :emphasis:`device\_service\_tag`\ is mutually exclusive with \ :emphasis:`device\_id`\ .
setting_type (True, str, None)
The type of the Quick Deploy settings to be applied.
- ``ServerQuickDeploy`` to apply the server Quick Deploy settings.
+ \ :literal:`ServerQuickDeploy`\ to apply the server Quick Deploy settings.
- ``IOMQuickDeploy`` to apply the IOM Quick Deploy settings.
+ \ :literal:`IOMQuickDeploy`\ to apply the IOM Quick Deploy settings.
job_wait (optional, bool, True)
@@ -56,9 +56,9 @@ Parameters
job_wait_timeout (optional, int, 120)
- The maximum wait time of *job_wait* in seconds. The job is tracked only for this duration.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in seconds. The job is tracked only for this duration.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
quick_deploy_options (True, dict, None)
@@ -68,7 +68,7 @@ Parameters
password (optional, str, None)
The password to login to the server or IOM.
- The module will always report change when *password* option is added.
+ The module will always report change when \ :emphasis:`password`\ option is added.
ipv4_enabled (optional, bool, None)
@@ -78,23 +78,23 @@ Parameters
ipv4_network_type (optional, str, None)
IPv4 network type.
- *ipv4_network_type* is required if *ipv4_enabled* is ``true``.
+ \ :emphasis:`ipv4\_network\_type`\ is required if \ :emphasis:`ipv4\_enabled`\ is \ :literal:`true`\ .
- ``Static`` to configure the static IP settings.
+ \ :literal:`Static`\ to configure the static IP settings.
- ``DHCP`` to configure the Dynamic IP settings.
+ \ :literal:`DHCP`\ to configure the Dynamic IP settings.
ipv4_subnet_mask (optional, str, None)
IPv4 subnet mask.
- *ipv4_subnet_mask* is required if *ipv4_network_type* is ``Static``.
+ \ :emphasis:`ipv4\_subnet\_mask`\ is required if \ :emphasis:`ipv4\_network\_type`\ is \ :literal:`Static`\ .
ipv4_gateway (optional, str, None)
IPv4 gateway.
- *ipv4_gateway* is required if *ipv4_network_type* is ``Static``.
+ \ :emphasis:`ipv4\_gateway`\ is required if \ :emphasis:`ipv4\_network\_type`\ is \ :literal:`Static`\ .
ipv6_enabled (optional, bool, None)
@@ -104,23 +104,23 @@ Parameters
ipv6_network_type (optional, str, None)
IPv6 network type.
- *ipv6_network_type* is required if *ipv6_enabled* is ``true``.
+ \ :emphasis:`ipv6\_network\_type`\ is required if \ :emphasis:`ipv6\_enabled`\ is \ :literal:`true`\ .
- ``Static`` to configure the static IP settings.
+ \ :literal:`Static`\ to configure the static IP settings.
- ``DHCP`` to configure the Dynamic IP settings.
+ \ :literal:`DHCP`\ to configure the Dynamic IP settings.
ipv6_prefix_length (optional, int, None)
IPV6 prefix length.
- *ipv6_prefix_length* is required if *ipv6_network_type* is ``Static``.
+ \ :emphasis:`ipv6\_prefix\_length`\ is required if \ :emphasis:`ipv6\_network\_type`\ is \ :literal:`Static`\ .
ipv6_gateway (optional, str, None)
IPv6 gateway.
- *ipv6_gateway* is required if *ipv6_network_type* is ``Static``.
+ \ :emphasis:`ipv6\_gateway`\ is required if \ :emphasis:`ipv6\_network\_type`\ is \ :literal:`Static`\ .
slots (optional, list, None)
@@ -152,21 +152,29 @@ Parameters
username (True, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :literal:`OME\_USERNAME`\ is used.
+
+ Example: export OME\_USERNAME=username
+
password (True, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :literal:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -185,8 +193,8 @@ Notes
.. note::
- Run this module from a system that has direct access to OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
- - The module will always report change when *password* option is added.
+ - This module supports \ :literal:`check\_mode`\ .
+ - The module will always report change when \ :emphasis:`password`\ option is added.
- If the chassis is a member of a multi-chassis group and it is assigned as a backup lead chassis, the operations performed on the chassis using this module may conflict with the management operations performed on the chassis through the lead chassis.
@@ -291,4 +299,5 @@ Authors
- Felix Stephen (@felixs88)
- Shivam Sharma (@ShivamSh3)
+- Kritika Bhateja (@Kritika-Bhateja-03)
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_devices.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_devices.rst
index 756adde24..b1d56a045 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_devices.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_devices.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,27 +30,27 @@ Parameters
device_service_tags (optional, list, None)
Service tag of the target devices.
- This is mutually exclusive with *device_ids*.
+ This is mutually exclusive with \ :emphasis:`device\_ids`\ .
device_ids (optional, list, None)
IDs of the target devices.
- This is mutually exclusive with *device_service_tags*.
+ This is mutually exclusive with \ :emphasis:`device\_service\_tags`\ .
state (optional, str, present)
- ``present`` Allows to perform the *device_action* on the target devices.
+ \ :literal:`present`\ Allows to perform the \ :emphasis:`device\_action`\ on the target devices.
- ``absent`` Removes the device from OpenManage Enterprise. Job is not triggered. *job_wait*, *job_schedule*, *job_name*, and *job_description* are not applicable to this operation.
+ \ :literal:`absent`\ Removes the device from OpenManage Enterprise. Job is not triggered. \ :emphasis:`job\_wait`\ , \ :emphasis:`job\_schedule`\ , \ :emphasis:`job\_name`\ , and \ :emphasis:`job\_description`\ are not applicable to this operation.
device_action (optional, str, refresh_inventory)
- ``refresh_inventory`` refreshes the inventory on the target devices.
+ \ :literal:`refresh\_inventory`\ refreshes the inventory on the target devices.
- ``reset_idrac`` Triggers a reset on the target iDRACs.
+ \ :literal:`reset\_idrac`\ Triggers a reset on the target iDRACs.
- ``clear_idrac_job_queue`` Clears the job queue on the target iDRACs.
+ \ :literal:`clear\_idrac\_job\_queue`\ Clears the job queue on the target iDRACs.
A job is triggered for each action.
@@ -58,15 +58,15 @@ Parameters
job_wait (optional, bool, True)
Provides an option to wait for the job completion.
- This option is applicable when *state* is ``present``.
+ This option is applicable when \ :emphasis:`state`\ is \ :literal:`present`\ .
- This is applicable when *job_schedule* is ``startnow``.
+ This is applicable when \ :emphasis:`job\_schedule`\ is \ :literal:`startnow`\ .
job_wait_timeout (optional, int, 1200)
- The maximum wait time of *job_wait* in seconds. The job is tracked only for this duration.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in seconds. The job is tracked only for this duration.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
job_schedule (optional, str, startnow)
@@ -98,11 +98,11 @@ Parameters
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -120,9 +120,9 @@ Notes
-----
.. note::
- - For ``idrac_reset``, the job triggers only the iDRAC reset operation and does not track the complete reset cycle.
+ - For \ :literal:`idrac\_reset`\ , the job triggers only the iDRAC reset operation and does not track the complete reset cycle.
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
@@ -216,4 +216,5 @@ Authors
~~~~~~~
- Jagadeesh N V(@jagadeeshnv)
+- ShivamSh3(@ShivamSh3)
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/redfish_storage_volume.rst b/ansible_collections/dellemc/openmanage/docs/modules/redfish_storage_volume.rst
index f6e5f577f..c349b7d99 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/redfish_storage_volume.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/redfish_storage_volume.rst
@@ -227,6 +227,7 @@ Notes
- Run this module from a system that has direct access to Redfish APIs.
- This module supports \ :literal:`check\_mode`\ .
- This module always reports changes when \ :emphasis:`name`\ and \ :emphasis:`volume\_id`\ are not specified. Either \ :emphasis:`name`\ or \ :emphasis:`volume\_id`\ is required to support \ :literal:`check\_mode`\ .
+ - This module does not support the create operation of RAID6 and RAID60 storage volume on iDRAC8.
- This module supports IPv4 and IPv6 addresses.
@@ -405,4 +406,5 @@ Authors
- Sajna Shetty(@Sajna-Shetty)
- Kritika Bhateja(@Kritika-Bhateja-03)
+- Shivam Sharma(@ShivamSh3)
diff --git a/ansible_collections/dellemc/openmanage/meta/runtime.yml b/ansible_collections/dellemc/openmanage/meta/runtime.yml
index 8255bdc82..2c94ec4fa 100644
--- a/ansible_collections/dellemc/openmanage/meta/runtime.yml
+++ b/ansible_collections/dellemc/openmanage/meta/runtime.yml
@@ -1,5 +1,5 @@
---
-requires_ansible: ">=2.14.0"
+requires_ansible: ">=2.15.0"
plugin_routing:
modules:
dellemc_get_firmware_inventory:
@@ -34,3 +34,10 @@ plugin_routing:
deprecation:
removal_date: "2024-07-31"
warning_text: idrac_timezone_ntp will be removed in a future release of this collection. Use dellemc.openmanage.idrac_attributes instead.
+ dellemc_idrac_storage_volume:
+ redirect: dellemc.openmanage.idrac_storage_volume
+ deprecation:
+ removal_date: "2026-03-31"
+ warning_text: dellemc_idrac_storage_volume will be removed in a future
+ release of this collection.
+ Use dellemc.openmanage.idrac_storage_volume instead.
diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_diagnostics.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_diagnostics.yml
new file mode 100644
index 000000000..0cbc68bce
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_diagnostics.yml
@@ -0,0 +1,97 @@
+---
+- name: Dell OpenManage Ansible iDRAC Diagnostics.
+ hosts: idrac
+ gather_facts: false
+
+ tasks:
+ - name: Run and export the diagnostics to local path
+ dellemc.openmanage.idrac_diagnostics:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "idrac_user"
+ idrac_password: "idrac_password"
+ ca_path: "path/to/ca_file"
+ run: true
+ export: true
+ share_parameters:
+ share_type: "local"
+ share_name: "/opt/local/diagnostics/"
+ file_name: "diagnostics.txt"
+ delegate_to: localhost
+
+ - name: Run the diagnostics with power cycle reboot on schedule
+ dellemc.openmanage.idrac_diagnostics:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "idrac_user"
+ idrac_password: "idrac_password"
+ ca_path: "path/to/ca_file"
+ run: true
+ run_mode: "express"
+ reboot_type: "power_cycle"
+ scheduled_start_time: 20240101101015
+ delegate_to: localhost
+
+ - name: Run and export the diagnostics to HTTPS share
+ dellemc.openmanage.idrac_diagnostics:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "idrac_user"
+ idrac_password: "idrac_password"
+ ca_path: "path/to/ca_file"
+ run: true
+ export: true
+ share_parameters:
+ share_type: "HTTPS"
+ ignore_certificate_warning: "on"
+ share_name: "/share_path/diagnostics_collection_path"
+ ip_address: "192.168.0.2"
+ file_name: "diagnostics.txt"
+ delegate_to: localhost
+
+ - name: Run and export the diagnostics to NFS share
+ dellemc.openmanage.idrac_diagnostics:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "idrac_user"
+ idrac_password: "idrac_password"
+ ca_path: "path/to/ca_file"
+ run: true
+ export: true
+ share_parameters:
+ share_type: "NFS"
+ share_name: "nfsshare/diagnostics_collection_path/"
+ ip_address: "192.168.0.3"
+ file_name: "diagnostics.txt"
+ delegate_to: localhost
+
+ - name: Export the diagnostics to CIFS share
+ dellemc.openmanage.idrac_diagnostics:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "idrac_user"
+ idrac_password: "idrac_password"
+ ca_path: "path/to/ca_file"
+ export: true
+ share_parameters:
+ share_type: "CIFS"
+ share_name: "/cifsshare/diagnostics_collection_path/"
+ ip_address: "192.168.0.4"
+ file_name: "diagnostics.txt"
+ delegate_to: localhost
+
+ - name: Export the diagnostics to HTTPS share via proxy
+ dellemc.openmanage.idrac_diagnostics:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "idrac_user"
+ idrac_password: "idrac_password"
+ ca_path: "path/to/ca_file"
+ export: true
+ share_parameters:
+ share_type: "HTTPS"
+ share_name: "/share_path/diagnostics_collection_path"
+ ignore_certificate_warning: "on"
+ ip_address: "192.168.0.2"
+ file_name: "diagnostics.txt"
+ proxy_support: parameters_proxy
+ proxy_type: http
+ proxy_server: "192.168.0.5"
+ proxy_port: 1080
+ proxy_username: "proxy_user"
+ proxy_password: "proxy_password"
+ delegate_to: localhost
diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_reset.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_reset.yml
index 69e4ac36f..a977df598 100644
--- a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_reset.yml
+++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_reset.yml
@@ -4,12 +4,64 @@
gather_facts: false
tasks:
- - name: Reset iDRAC
+ - name: Reset the iDRAC to all and wait till the iDRAC is accessible.
dellemc.openmanage.idrac_reset:
- idrac_ip: "{{ idrac_ip }}"
- idrac_user: "{{ idrac_user }}"
- idrac_password: "{{ idrac_password }}"
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
ca_path: "/path/to/ca_cert.pem"
- tags:
- - idrac_reset
+ reset_to_default: "All"
+ delegate_to: localhost
+
+ - name: Reset the iDRAC to default and
+ do not wait till the iDRAC is accessible.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "Default"
+ wait_for_idrac: false
+ delegate_to: localhost
+
+ - name: Force reset the iDRAC to default.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "Default"
+ force_reset: true
+ delegate_to: localhost
+
+ - name: Gracefully restart the iDRAC.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ delegate_to: localhost
+
+ - name: Reset the iDRAC to custom defaults XML and
+ do not wait till the iDRAC is accessible.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "CustomDefaults"
+ custom_defaults_file: "/path/to/custom_defaults.xml"
+ delegate_to: localhost
+
+ - name: Reset the iDRAC to custom defaults buffer input and
+ do not wait till the iDRAC is accessible.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "CustomDefaults"
+ custom_defaults_buffer: "<SystemConfiguration><Component
+ FQDD='iDRAC.Embedded.1'><Attribute Name='IPMILan.1#Enable'>
+ Disabled</Attribute></Component></SystemConfiguration>"
delegate_to: localhost
diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_session.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_session.yml
new file mode 100644
index 000000000..1e369ef5b
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_session.yml
@@ -0,0 +1,21 @@
+---
+- name: Dell OpenManage Ansible iDRAC Session Management.
+ hosts: idrac
+ gather_facts: false
+
+ tasks:
+ - name: Create a session
+ dellemc.openmanage.idrac_session:
+ hostname: 192.168.0.1
+ username: username
+ password: password
+ state: present
+ delegate_to: localhost
+
+ - name: Delete a session
+ dellemc.openmanage.idrac_session:
+ hostname: 192.168.0.1
+ state: absent
+ auth_token: aed4aa802b748d2f3b31deec00a6b28a
+ session_id: 2
+ delegate_to: localhost
diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/dellemc_idrac_storage_volume.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_storage_volume.yml
index d3d561ca4..d32ea0dad 100644
--- a/ansible_collections/dellemc/openmanage/playbooks/idrac/dellemc_idrac_storage_volume.yml
+++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_storage_volume.yml
@@ -5,7 +5,7 @@
tasks:
- name: Create single volume.
- dellemc.openmanage.dellemc_idrac_storage_volume:
+ dellemc.openmanage.idrac_storage_volume:
idrac_ip: "{{ idrac_ip }}"
idrac_user: "{{ idrac_user }}"
idrac_password: "{{ idrac_password }}"
@@ -20,7 +20,7 @@
- create_single_volume
- name: Create multiple volume.
- dellemc.openmanage.dellemc_idrac_storage_volume:
+ dellemc.openmanage.idrac_storage_volume:
idrac_ip: "{{ idrac_ip }}"
idrac_user: "{{ idrac_user }}"
idrac_password: "{{ idrac_password }}"
@@ -63,7 +63,7 @@
- create_multiple_volume
- name: Delete single volume.
- dellemc.openmanage.dellemc_idrac_storage_volume:
+ dellemc.openmanage.idrac_storage_volume:
idrac_ip: "{{ idrac_ip }}"
idrac_user: "{{ idrac_user }}"
idrac_password: "{{ idrac_password }}"
@@ -76,7 +76,7 @@
- delete_single_volume
- name: Delete multiple volume.
- dellemc.openmanage.dellemc_idrac_storage_volume:
+ dellemc.openmanage.idrac_storage_volume:
idrac_ip: "{{ idrac_ip }}"
idrac_user: "{{ idrac_user }}"
idrac_password: "{{ idrac_password }}"
@@ -90,7 +90,7 @@
- delete_multiple_volume
- name: View specific volume details.
- dellemc.openmanage.dellemc_idrac_storage_volume:
+ dellemc.openmanage.idrac_storage_volume:
idrac_ip: "{{ idrac_ip }}"
idrac_user: "{{ idrac_user }}"
idrac_password: "{{ idrac_password }}"
@@ -103,7 +103,7 @@
- view_specific_volume
- name: View all volume details.
- dellemc.openmanage.dellemc_idrac_storage_volume:
+ dellemc.openmanage.idrac_storage_volume:
idrac_ip: "{{ idrac_ip }}"
idrac_user: "{{ idrac_user }}"
idrac_password: "{{ idrac_password }}"
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml
index 3fd901e1c..1dbfd2141 100644
--- a/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml
@@ -35,9 +35,8 @@
# and next task device_ids attribute replaced with device_service_tag.
- name: Filter the non compliant device based on the retrieved compliance report.
ansible.builtin.set_fact:
- non_compliance_devices: "{{ compliance_report.compliance_info | json_query(jquery | default('')) | map(attribute='Id') | list }}"
- vars:
- jquery: 'value[?ComplianceStatus==''NONCOMPLIANT'']'
+ non_compliance_devices: "{{ compliance_report.compliance_info | selectattr('ComplianceStatus', 'equalto', 'NONCOMPLIANT') | map(attribute='Id')
+ | list }}"
- name: Remediate a specified non-complaint devices to a configuration compliance baseline using device IDs # noqa: args[module]
dellemc.openmanage.ome_configuration_compliance_baseline:
diff --git a/ansible_collections/dellemc/openmanage/playbooks/roles/idrac_user/idrac_user.yml b/ansible_collections/dellemc/openmanage/playbooks/roles/idrac_user/idrac_user.yml
new file mode 100644
index 000000000..7bc4a9dd2
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/roles/idrac_user/idrac_user.yml
@@ -0,0 +1,9 @@
+---
+- name: Idrac user Configurations
+ hosts: idrac
+ gather_facts: false
+ vars_files:
+ - ../vars_files/credentials.yml
+ - ../vars_files/user.yml
+ roles:
+ - dellemc.openmanage.idrac_user
diff --git a/ansible_collections/dellemc/openmanage/playbooks/roles/vars_files/user.yml b/ansible_collections/dellemc/openmanage/playbooks/roles/vars_files/user.yml
new file mode 100644
index 000000000..3a8d49fa1
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/roles/vars_files/user.yml
@@ -0,0 +1,15 @@
+---
+# Role idrac_user Vars
+user_name: test_user
+state: present
+new_user_name: test_user_updated
+user_password: password123
+privilege: Administrator
+ipmi_lan_privilege: No Access
+ipmi_serial_privilege: Administrator
+enable: true
+sol_enable: true
+protocol_enable: true
+authentication_protocol: SHA
+privacy_protocol: AES
+custom_privilege: 511
diff --git a/ansible_collections/dellemc/openmanage/plugins/README.md b/ansible_collections/dellemc/openmanage/plugins/README.md
index 7711a1d84..3864a2bb8 100644
--- a/ansible_collections/dellemc/openmanage/plugins/README.md
+++ b/ansible_collections/dellemc/openmanage/plugins/README.md
@@ -28,6 +28,7 @@ Here are the list of modules and module_utils supported by Dell.
├── idrac_bios.py
├── idrac_boot.py
├── idrac_certificates.py
+ ├── idrac_diagnostics.py
├── idrac_firmware.py
├── idrac_firmware_info.py
├── idrac_license.py
@@ -41,6 +42,8 @@ Here are the list of modules and module_utils supported by Dell.
├── idrac_redfish_storage_controller.py
├── idrac_reset.py
├── idrac_server_config_profile.py
+ ├── idrac_session.py
+ ├── idrac_storage_volume.py
├── idrac_syslog.py
├── idrac_system_info.py
├── idrac_timezone_ntp.py
diff --git a/ansible_collections/dellemc/openmanage/plugins/module_utils/dellemc_idrac.py b/ansible_collections/dellemc/openmanage/plugins/module_utils/dellemc_idrac.py
index b2b2240d0..cdd94c3cd 100644
--- a/ansible_collections/dellemc/openmanage/plugins/module_utils/dellemc_idrac.py
+++ b/ansible_collections/dellemc/openmanage/plugins/module_utils/dellemc_idrac.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
# Dell OpenManage Ansible Modules
-# Version 7.1.0
-# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.1.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
@@ -30,6 +30,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.module_utils.common.parameters import env_fallback
+from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import compress_ipv6
try:
from omsdk.sdkinfra import sdkinfra
from omsdk.sdkcreds import UserCredentials
@@ -56,7 +57,7 @@ class iDRACConnection:
def __init__(self, module_params):
if not HAS_OMSDK:
raise ImportError("Dell OMSDK library is required for this module")
- self.idrac_ip = module_params['idrac_ip']
+ self.idrac_ip = compress_ipv6(module_params['idrac_ip'])
self.idrac_user = module_params['idrac_user']
self.idrac_pwd = module_params['idrac_password']
self.idrac_port = module_params['idrac_port']
diff --git a/ansible_collections/dellemc/openmanage/plugins/module_utils/session_utils.py b/ansible_collections/dellemc/openmanage/plugins/module_utils/session_utils.py
new file mode 100644
index 000000000..4bead057a
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/plugins/module_utils/session_utils.py
@@ -0,0 +1,322 @@
+# -*- coding: utf-8 -*-
+
+# Dell OpenManage Ansible Modules
+# Version 9.2.0
+# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import json
+import os
+from ansible.module_utils.urls import open_url
+from ansible.module_utils.six.moves.urllib.parse import urlencode
+from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import config_ipv6
+
+HEADER_TYPE = "application/json"
+
+
+class OpenURLResponse():
+ """
+ HTTP response handler class.
+ """
+ def __init__(self, resp):
+ """
+ Initializes a new instance of the class.
+
+ Args:
+ resp (Response): The response object to read the body from.
+
+ Initializes the following instance variables:
+ - body (bytes): The body of the response, or None if the response is None.
+ - resp (Response): The response object.
+
+ If the response is not None, the body is set to the content of the response.
+ """
+ self.body = None
+ self.resp = resp
+ if self.resp:
+ self.body = self.resp.read()
+
+ @property
+ def json_data(self):
+ """
+ Returns the JSON data parsed from the `body` attribute of the object.
+
+ :return: The parsed JSON data.
+ :raises ValueError: If the `body` attribute cannot be parsed as JSON.
+ """
+ try:
+ return json.loads(self.body)
+ except ValueError as exc:
+ raise ValueError("Unable to parse json") from exc
+
+ @property
+ def status_code(self):
+ """
+ Get the status code of the response.
+
+ Returns:
+ int: The status code of the response.
+ """
+ return self.resp.getcode()
+
+ @property
+ def success(self):
+ """
+ Returns a boolean indicating whether the status code of the response is within the range
+ of 200-299.
+
+ :return: True if the status code is within the range of 200-299, False otherwise.
+ :rtype: bool
+ """
+ status = self.status_code
+        return 200 <= status <= 299
+
+ @property
+ def headers(self):
+ """
+ Returns the headers of the response object.
+
+ :return: A dictionary containing the headers of the response object.
+ :rtype: dict
+ """
+ return self.resp.headers
+
+ @property
+ def reason(self):
+ """
+ Get the reason for the response.
+
+ Returns:
+ str: The reason for the response.
+ """
+ return self.resp.reason
+
+
+class SessionAPI():
+ """
+ Main class for session operations.
+ """
+ def __init__(self, module_params):
+ """
+ Initializes the object with the given module parameters.
+
+ Args:
+ module_params (dict): A dictionary containing the module parameters.
+ - "hostname" (str): The IP address or hostname of the target system.
+ - "username" (str): The username for authentication.
+ - "password" (str): The password for authentication.
+ - "port" (int, optional): The port number. Defaults to None.
+ - "validate_certs" (bool, optional): Whether to validate SSL certificates. Defaults
+ to False.
+ - "ca_path" (str, optional): The path to the CA certificate file. Defaults to None.
+ - "timeout" (int, optional): The timeout value in seconds. Defaults to None.
+ - "use_proxy" (bool, optional): Whether to use a proxy. Defaults to True.
+
+ Returns:
+ None
+ """
+ self.ipaddress = module_params.get("hostname")
+ self.username = module_params.get("username")
+ self.password = module_params.get("password")
+ self.port = module_params.get("port")
+ self.validate_certs = module_params.get("validate_certs", False)
+ self.ca_path = module_params.get("ca_path")
+ self.timeout = module_params.get("timeout")
+ self.use_proxy = module_params.get("use_proxy", True)
+ self.protocol = 'https'
+ self.ipaddress = config_ipv6(self.ipaddress)
+ self.set_headers(module_params)
+
+ def set_headers(self, module_params):
+ """
+ Set the headers for the HTTP request based on the module parameters.
+
+ Parameters:
+ module_params (dict): The module parameters containing the state and auth_token.
+
+ Returns:
+ None
+
+ This function sets the headers for the HTTP request based on the state parameter in the
+ module_params.
+ If the state is "present", the headers will include 'Content-Type' and 'Accept' with values
+ 'application/json'.
+ If the state is not "present", the headers will include 'Content-Type', 'Accept', and
+ 'X-Auth-Token' with the value from the auth_token parameter in module_params.
+ """
+ if module_params.get("state") == "present":
+ self._headers = {
+ 'Content-Type': HEADER_TYPE,
+ 'Accept': HEADER_TYPE
+ }
+ else:
+ self._headers = {
+ 'Content-Type': HEADER_TYPE,
+ 'Accept': HEADER_TYPE,
+ 'X-Auth-Token': module_params.get("auth_token")
+ }
+
+ def _get_url(self, uri):
+ """
+ Generate the full URL by combining the protocol, IP address, port, and URI.
+
+ Parameters:
+ uri (str): The URI to be appended to the URL.
+
+ Returns:
+ str: The full URL generated by combining the protocol, IP address, port, and URI.
+ """
+ return f"{self.protocol}://{self.ipaddress}:{self.port}{uri}"
+
+ def _build_url(self, path, query_param=None):
+ """
+ Builds a URL by concatenating the base URI with the given path and query parameters.
+
+ Args:
+ path (str): The path component of the URL.
+ query_param (dict, optional): A dictionary of query parameters to be appended to the
+ URL. Defaults to None.
+
+ Returns:
+ str: The fully constructed URL.
+
+ Raises:
+ None
+
+ Examples:
+ >>> session = SessionUtils()
+ >>> session._build_url("/api/endpoint", {"param1": "value1", "param2": "value2"})
+ "/api/endpoint?param1=value1&param2=value2"
+ """
+ url = path
+ base_uri = self._get_url(url)
+ if path:
+ url = base_uri
+ if query_param:
+ url += f"?{urlencode(query_param)}"
+ return url
+
+ def _url_common_args_spec(self, method, api_timeout, headers=None):
+ """
+ Generates the common arguments for a URL request.
+
+ Args:
+ method (str): The HTTP method for the request.
+ api_timeout (int, optional): The timeout for the API request. If None, the default
+ timeout is used.
+ headers (dict, optional): Additional headers to include in the request.
+
+ Returns:
+ dict: A dictionary containing the common arguments for the URL request. The dictionary
+ has the following keys:
+ - method (str): The HTTP method for the request.
+ - validate_certs (bool): Whether to validate the SSL certificates.
+ - ca_path (str): The path to the CA certificate bundle.
+ - use_proxy (bool): Whether to use a proxy for the request.
+ - headers (dict): The headers to include in the request.
+ - timeout (int): The timeout for the request.
+ - follow_redirects (str): The policy for following redirects.
+
+ """
+ req_header = self._headers
+ if headers:
+ req_header.update(headers)
+ if api_timeout is None:
+ api_timeout = self.timeout
+ if self.ca_path is None:
+ self.ca_path = self._get_omam_ca_env()
+ url_kwargs = {
+ "method": method,
+ "validate_certs": self.validate_certs,
+ "ca_path": self.ca_path,
+ "use_proxy": self.use_proxy,
+ "headers": req_header,
+ "timeout": api_timeout,
+ "follow_redirects": 'all',
+ }
+ return url_kwargs
+
+ def _args_session(self, method, api_timeout, headers=None):
+ """
+ Returns a dictionary containing the arguments needed to establish a session.
+
+ :param path: A string representing the path of the API endpoint.
+ :param method: A string representing the HTTP method to be used.
+ :param api_timeout: An integer representing the timeout for the API request.
+ :param headers: An optional dictionary containing additional headers to be included in the
+ request.
+ :return: A dictionary containing the arguments needed to establish a session, including the
+ URL arguments, headers, and API timeout.
+ """
+ req_header = self._headers
+ if headers:
+ req_header.update(headers)
+ url_kwargs = self._url_common_args_spec(method, api_timeout, headers=headers)
+ return url_kwargs
+
+ def invoke_request(self, uri, method, data=None, query_param=None, headers=None,
+ api_timeout=None, dump=True):
+ """
+ Invokes a request to the specified URI using the given method and optional parameters.
+
+ :param uri: The URI to send the request to.
+ :type uri: str
+ :param method: The HTTP method to use for the request.
+ :type method: str
+ :param data: The data to send with the request (default: None).
+ :type data: dict or None
+ :param query_param: The query parameters to include in the request URL (default: None).
+ :type query_param: dict or None
+ :param headers: The headers to include in the request (default: None).
+ :type headers: dict or None
+ :param api_timeout: The timeout for the request in seconds (default: None).
+ :type api_timeout: int or None
+ :param dump: Whether to dump the data to JSON before sending the request (default: True).
+ :type dump: bool
+ :return: The response data from the request.
+ :rtype: OpenURLResponse
+ """
+ url_kwargs = self._args_session(method, api_timeout, headers=headers)
+ if data and dump:
+ data = json.dumps(data)
+ url = self._build_url(uri, query_param=query_param)
+ resp = open_url(url, data=data, **url_kwargs)
+ resp_data = OpenURLResponse(resp)
+ return resp_data
+
+ def _get_omam_ca_env(self):
+ """
+ Returns the value of the environment variable REQUESTS_CA_BUNDLE, or if it is not set,
+ the value of the environment variable CURL_CA_BUNDLE, or if that is not set,
+ the value of the environment variable OMAM_CA_BUNDLE.
+
+ :return: The value of the environment variable, or None if none of the variables are set.
+ :rtype: str or None
+ """
+ return (os.environ.get("REQUESTS_CA_BUNDLE") or
+ os.environ.get("CURL_CA_BUNDLE") or
+ os.environ.get("OMAM_CA_BUNDLE"))
diff --git a/ansible_collections/dellemc/openmanage/plugins/module_utils/utils.py b/ansible_collections/dellemc/openmanage/plugins/module_utils/utils.py
index 3d8abfbe5..b838197e0 100644
--- a/ansible_collections/dellemc/openmanage/plugins/module_utils/utils.py
+++ b/ansible_collections/dellemc/openmanage/plugins/module_utils/utils.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
# Dell OpenManage Ansible Modules
-# Version 8.2.0
-# Copyright (C) 2022-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.1.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
@@ -85,6 +85,7 @@ def config_ipv6(hostname):
if ']:' in ip_addr:
ip_addr, port = ip_addr.split(']:')
ip_addr = ip_addr.strip('[]')
+ ip_addr = compress_ipv6(ip_addr)
if port is None or port == "":
hostname = "[{0}]".format(ip_addr)
else:
@@ -92,6 +93,20 @@ def config_ipv6(hostname):
return hostname
+def compress_ipv6(ipv6_long):
+ groups = ipv6_long.split(':')
+ temp = []
+ for group in groups:
+ group = re.sub(r'^0+', '', group)
+ group = group.lower()
+ if 0 == len(group):
+ group = '0'
+ temp.append(group)
+ tempstr = ':'.join(temp)
+    ipv6_short = re.sub(r'(:0)+', '::', tempstr, 1)
+ return ipv6_short
+
+
def job_tracking(rest_obj, job_uri, max_job_wait_sec=600, job_state_var=('LastRunStatus', 'Id'),
job_complete_states=(2060, 2020, 2090), job_fail_states=(2070, 2101, 2102, 2103),
job_running_states=(2050, 2040, 2030, 2100),
@@ -493,12 +508,14 @@ def get_current_time(redfish_obj):
return curr_time, date_offset
-def xml_data_conversion(attr_dict, fqdd=None):
+def xml_data_conversion(attr_dict, fqdd=None, custom_payload_to_add=None):
component = """<Component FQDD="{0}">{1}</Component>"""
attr = ""
for k, v in attr_dict.items():
key = re.sub(r"\.(?!\d)", "#", k)
attr += '<Attribute Name="{0}">{1}</Attribute>'.format(key, v)
+ if custom_payload_to_add:
+ attr += custom_payload_to_add
root = component.format(fqdd, attr)
return root
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/dellemc_idrac_storage_volume.py b/ansible_collections/dellemc/openmanage/plugins/modules/dellemc_idrac_storage_volume.py
index e8021db18..050859c6d 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/dellemc_idrac_storage_volume.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/dellemc_idrac_storage_volume.py
@@ -18,6 +18,11 @@ DOCUMENTATION = r'''
module: dellemc_idrac_storage_volume
short_description: Configures the RAID configuration attributes
version_added: "2.0.0"
+deprecated:
+ removed_at_date: "2026-03-31"
+ why: Replaced with M(dellemc.openmanage.idrac_storage_volume).
+ alternative: Use M(dellemc.openmanage.idrac_storage_volume) instead.
+ removed_from_collection: dellemc.openmanage
description:
- This module is responsible for configuring the RAID attributes.
extends_documentation_fragment:
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_diagnostics.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_diagnostics.py
new file mode 100644
index 000000000..3df5a68fd
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_diagnostics.py
@@ -0,0 +1,874 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 9.0.0
+# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = r"""
+---
+module: idrac_diagnostics
+short_description: Run and Export iDRAC diagnostics
+version_added: "9.0.0"
+description:
+ - This module allows you to run and export diagnostics on iDRAC.
+extends_documentation_fragment:
+ - dellemc.openmanage.idrac_auth_options
+options:
+ run:
+ description:
+ - Run the diagnostics job on iDRAC.
+ - Run the diagnostics job based on the I(run_mode) and save the report in the internal storage. I(reboot_type) is applicable.
+ type: bool
+ export:
+ description:
+ - Exports the diagnostics information to the given share.
+ - This operation requires I(share_parameters).
+ - When I(run) is C(true) and I(job_wait) is C(false), only then the run diagnostics job is triggered. I(export) is ignored.
+ type: bool
+ run_mode:
+ description:
+ - This option provides the choices to run the diagnostics.
+ - C(express) The express diagnostics runs a test package for each server subsystem. However,
+ it does not run the complete set of tests available in the package for each subsystem.
+ - C(extended) The extended diagnostics run all available tests in each test package for all subsystems.
+ - C(long_run) The long-run diagnostics runs express and extended tests.
+ type: str
+ choices: [express, extended, long_run]
+ default: express
+ reboot_type:
+ description:
+ - This option provides the choice to reboot the host immediately to run the diagnostics.
+ - This is applicable when I(run) is C(true).
+ - C(force) Forced graceful shutdown signals the operating system to turn off and wait for ten minutes.
+ If the operating system does not turn off, the iDRAC power cycles the system.
+ - C(graceful) Graceful shutdown waits for the operating system to turn off and wait for the system to restart.
+ - C(power_cycle) performs a power cycle for a hard reset on the device.
+ type: str
+ choices: [force, graceful, power_cycle]
+ default: graceful
+ scheduled_start_time:
+ description:
+ - Schedules the job at the specified time.
+ - The accepted formats are yyyymmddhhmmss and YYYY-MM-DDThh:mm:ss+HH:MM.
+      - This is applicable when I(run) is C(true) and I(reboot_type) is C(power_cycle).
+ type: str
+ scheduled_end_time:
+ description:
+ - Run the diagnostic until the specified end date and end time after the I(scheduled_start_time).
+ - The accepted formats are yyyymmddhhmmss and YYYY-MM-DDThh:mm:ss+HH:MM.
+ - If the run operation does not complete before the specified end time, then the operation fails.
+      - This is applicable when I(run) is C(true) and I(reboot_type) is C(power_cycle).
+ type: str
+ job_wait:
+ description:
+ - Provides the option to wait for job completion.
+ - This is applicable when I(run) is C(true) and I(reboot_type) is C(power_cycle).
+ - This is applicable only to run the diagnostics job.
+ type: bool
+ default: true
+ job_wait_timeout:
+ description:
+ - Time in seconds to wait for job completion.
+ - This is applicable when I(job_wait) is C(true).
+ type: int
+ default: 1200
+ share_parameters:
+ description:
+ - Parameters that are required for the export operation of diagnostics.
+ - I(share_parameters) is required when I(export) is C(true).
+ type: dict
+ suboptions:
+ share_type:
+ description:
+ - Share type of the network share.
+ - C(local) uses local path for I(export) operation.
+ - C(nfs) uses NFS share for I(export) operation.
+ - C(cifs) uses CIFS share for I(export) operation.
+ - C(http) uses HTTP share for I(export) operation.
+ - C(https) uses HTTPS share for I(export) operation.
+ type: str
+ choices: [local, nfs, cifs, http, https]
+ default: local
+ file_name:
+ description:
+ - Diagnostics file name for I(export) operation.
+ type: str
+ ip_address:
+ description:
+ - IP address of the network share.
+ - I(ip_address) is required when I(share_type) is C(nfs), C(cifs), C(http) or C(https).
+ type: str
+ share_name:
+ description:
+ - Network share or local path of the diagnostics file.
+ type: str
+ workgroup:
+ description:
+ - Workgroup of the network share.
+ - I(workgroup) is applicable only when I(share_type) is C(cifs).
+ type: str
+ username:
+ description:
+ - Username of the network share.
+ - I(username) is required when I(share_type) is C(cifs).
+ type: str
+ password:
+ description:
+ - Password of the network share.
+ - I(password) is required when I(share_type) is C(cifs).
+ type: str
+ ignore_certificate_warning:
+ description:
+ - Ignores the certificate warning while connecting to Share and is only applicable when I(share_type) is C(https).
+ - C(off) ignores the certificate warning.
+ - C(on) does not ignore the certificate warning.
+ type: str
+ choices: ["off", "on"]
+ default: "off"
+ proxy_support:
+ description:
+ - Specifies if proxy support must be used or not.
+ - C(off) does not use proxy settings.
+ - C(default_proxy) uses the default proxy settings.
+ - C(parameters_proxy) uses the specified proxy settings. I(proxy_server) is required when I(proxy_support) is C(parameters_proxy).
+ - I(proxy_support) is only applicable when I(share_type) is C(http) or C(https).
+ type: str
+ choices: ["off", "default_proxy", "parameters_proxy"]
+ default: "off"
+ proxy_type:
+ description:
+ - The proxy type of the proxy server.
+ - C(http) to select HTTP proxy.
+ - C(socks) to select SOCKS proxy.
+ - I(proxy_type) is only applicable when I(share_type) is C(http) or C(https) and when I(proxy_support) is C(parameters_proxy).
+ type: str
+ choices: [http, socks]
+ default: http
+ proxy_server:
+ description:
+ - The IP address of the proxy server.
+ - I(proxy_server) is required when I(proxy_support) is C(parameters_proxy).
+ - I(proxy_server) is only applicable when I(share_type) is C(http) or C(https) and when I(proxy_support) is C(parameters_proxy).
+ type: str
+ proxy_port:
+ description:
+ - The port of the proxy server.
+ - I(proxy_port) is only applicable when I(share_type) is C(http) or C(https) and when I(proxy_support) is C(parameters_proxy).
+ type: int
+ default: 80
+ proxy_username:
+ description:
+ - The username of the proxy server.
+ - I(proxy_username) is only applicable when I(share_type) is C(http) or C(https) and when I(proxy_support) is C(parameters_proxy).
+ type: str
+ proxy_password:
+ description:
+ - The password of the proxy server.
+ - I(proxy_password) is only applicable when I(share_type) is C(http) or C(https) and when I(proxy_support) is C(parameters_proxy).
+ type: str
+ resource_id:
+ type: str
+ description:
+ - Id of the resource.
+ - If the value for resource ID is not provided, the module picks the first resource ID available from the list of system resources returned by the iDRAC.
+requirements:
+ - "python >= 3.9.6"
+author:
+ - "Shivam Sharma(@ShivamSh3)"
+notes:
+ - Run this module from a system that has direct access to Dell iDRAC.
+ - This module supports only iDRAC9 and above.
+ - This module supports IPv4 and IPv6 addresses.
+ - This module supports C(check_mode).
+ - This module requires 'Dell Diagnostics' firmware package to be present on the server.
+ - When I(share_type) is C(local) for I(export) operation, job_details are not displayed.
+"""
+
+EXAMPLES = r"""
+---
+- name: Run and export the diagnostics to local path
+ dellemc.openmanage.idrac_diagnostics:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "path/to/ca_file"
+ run: true
+ export: true
+ share_parameters:
+ share_type: "local"
+      share_name: "/opt/local/diagnostics/"
+ file_name: "diagnostics.txt"
+
+- name: Run the diagnostics with power cycle reboot on schedule
+ dellemc.openmanage.idrac_diagnostics:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "path/to/ca_file"
+ run: true
+ run_mode: "express"
+ reboot_type: "power_cycle"
+ scheduled_start_time: 20240101101015
+
+- name: Run and export the diagnostics to HTTPS share
+ dellemc.openmanage.idrac_diagnostics:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "path/to/ca_file"
+ run: true
+ export: true
+ share_parameters:
+      share_type: "https"
+ ignore_certificate_warning: "on"
+ share_name: "/share_path/diagnostics_collection_path"
+ ip_address: "192.168.0.2"
+ file_name: "diagnostics.txt"
+
+- name: Run and export the diagnostics to NFS share
+ dellemc.openmanage.idrac_diagnostics:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "path/to/ca_file"
+ run: true
+ export: true
+ share_parameters:
+      share_type: "nfs"
+ share_name: "nfsshare/diagnostics_collection_path/"
+ ip_address: "192.168.0.3"
+ file_name: "diagnostics.txt"
+
+- name: Export the diagnostics to CIFS share
+ dellemc.openmanage.idrac_diagnostics:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "path/to/ca_file"
+ export: true
+ share_parameters:
+      share_type: "cifs"
+ share_name: "/cifsshare/diagnostics_collection_path/"
+ ip_address: "192.168.0.4"
+ file_name: "diagnostics.txt"
+
+- name: Export the diagnostics to HTTPS share via proxy
+ dellemc.openmanage.idrac_diagnostics:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "path/to/ca_file"
+ export: true
+ share_parameters:
+      share_type: "https"
+ share_name: "/share_path/diagnostics_collection_path"
+ ignore_certificate_warning: "on"
+ ip_address: "192.168.0.2"
+ file_name: "diagnostics.txt"
+ proxy_support: parameters_proxy
+ proxy_type: http
+ proxy_server: "192.168.0.5"
+ proxy_port: 1080
+ proxy_username: "proxy_user"
+ proxy_password: "proxy_password"
+"""
+
+RETURN = r'''
+---
+msg:
+ type: str
+ description: Status of the diagnostics operation.
+ returned: always
+ sample: "Successfully ran and exported the diagnostics."
+job_details:
+ description: Returns the output for status of the job.
+ returned: For run and export operations
+ type: dict
+ sample: {
+ "ActualRunningStartTime": "2024-01-10T10:14:31",
+ "ActualRunningStopTime": "2024-01-10T10:26:34",
+ "CompletionTime": "2024-01-10T10:26:34",
+ "Description": "Job Instance",
+ "EndTime": "2024-01-10T10:30:15",
+ "Id": "JID_XXXXXXXXXXXX",
+ "JobState": "Completed",
+ "JobType": "RemoteDiagnostics",
+ "Message": "Job completed successfully.",
+ "MessageArgs": [],
+ "MessageArgs@odata.count": 0,
+ "MessageId": "SYS018",
+ "Name": "Remote Diagnostics",
+ "PercentComplete": 100,
+ "StartTime": "2024-01-10T10:12:15",
+ "TargetSettingsURI": null
+ }
+diagnostics_file_path:
+ description: Returns the full path of the diagnostics file.
+ returned: For export operation
+ type: str
+ sample: "/share_path/diagnostics_collection_path/diagnostics.txt"
+error_info:
+ description: Details of the HTTP Error.
+ returned: on HTTP error
+ type: dict
+ sample: {
+ "error": {
+ "code": "Base.1.12.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "Message": "A Remote Diagnostic (ePSA) job already exists.",
+ "MessageArgs": [],
+ "MessageArgs@odata.count": 0,
+ "MessageId": "IDRAC.2.9.SYS098",
+ "RelatedProperties": [],
+ "RelatedProperties@odata.count": 0,
+ "Resolution": "A response action is not required if the scheduled start time of the existing Remote Diagnostic (ePSA) job is ok.
+ Else, delete the existing Diagnostics (ePSA) job and recreate another with an appropriate start time.",
+ "Severity": "Informational"
+ }
+ ]
+ }
+ }
+'''
+
+import json
+import os
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import (
+ config_ipv6, get_current_time, get_dynamic_uri, validate_and_get_first_resource_id_uri, remove_key, idrac_redfish_job_tracking)
+from datetime import datetime
+
+MANAGERS_URI = "/redfish/v1/Managers"
+
+OEM = "Oem"
+MANUFACTURER = "Dell"
+JOBS = "Jobs"
+JOBS_EXPAND = "?$expand=*($levels=1)"
+LC_SERVICE = "DellLCService"
+ACTIONS = "Actions"
+EXPORT = "#DellLCService.ExportePSADiagnosticsResult"
+RUN = "#DellLCService.RunePSADiagnostics"
+TEST_SHARE = "#DellLCService.TestNetworkShare"
+ODATA_REGEX = "(.*?)@odata"
+ODATA = "@odata.id"
+MESSAGE_EXTENDED_INFO = "@Message.ExtendedInfo"
+TIME_FORMAT_FILE = "%Y%m%d_%H%M%S"
+TIME_FORMAT_WITHOUT_OFFSET = "%Y%m%d%H%M%S"
+TIME_FORMAT_WITH_OFFSET = "%Y-%m-%dT%H:%M:%S%z"
+SUCCESS_EXPORT_MSG = "Successfully exported the diagnostics."
+SUCCESS_RUN_MSG = "Successfully ran the diagnostics operation."
+SUCCESS_RUN_AND_EXPORT_MSG = "Successfully ran and exported the diagnostics."
+RUNNING_RUN_MSG = "Successfully triggered the job to run diagnostics."
+ALREADY_RUN_MSG = "The diagnostics job is already present."
+INVALID_DIRECTORY_MSG = "Provided directory path '{path}' is not valid."
+NO_OPERATION_SKIP_MSG = "The operation is skipped."
+INSUFFICIENT_DIRECTORY_PERMISSION_MSG = "Provided directory path '{path}' is not writable. " \
+ "Please check if the directory has appropriate permissions"
+UNSUPPORTED_FIRMWARE_MSG = "iDRAC firmware version is not supported."
+TIMEOUT_NEGATIVE_OR_ZERO_MSG = "The parameter `job_wait_timeout` value cannot be negative or zero."
+WAIT_TIMEOUT_MSG = "The job is not complete after {0} seconds."
+START_TIME = "The specified scheduled time occurs in the past, " \
+ "provide a future time to schedule the job."
+INVALID_TIME = "The specified date and time `{0}` to schedule the diagnostics is not valid. Enter a valid date and time."
+END_START_TIME = "The end time `{0}` to schedule the diagnostics must be greater than the start time `{1}`."
+CHANGES_FOUND_MSG = "Changes found to be applied."
+NO_FILE = "The diagnostics file does not exist."
+
+PROXY_SUPPORT = {"off": "Off", "default_proxy": "DefaultProxy", "parameters_proxy": "ParametersProxy"}
+STATUS_SUCCESS = [200, 202]
+
+
+class Diagnostics:
+
+ def __init__(self, idrac, module):
+ self.idrac = idrac
+ self.module = module
+ self.diagnostics_file_path = None
+ self.run_url = None
+ self.export_url = None
+ self.share_name = None
+ self.file_name = None
+
+ def execute(self):
+ # To be overridden by the subclasses
+ pass
+
+ def get_payload_details(self):
+ payload = {}
+ payload["ShareType"] = self.module.params.get('share_parameters').get('share_type').upper()
+ payload["IPAddress"] = self.module.params.get('share_parameters').get('ip_address')
+ payload["ShareName"] = self.module.params.get('share_parameters').get('share_name')
+ payload["UserName"] = self.module.params.get('share_parameters').get('username')
+ payload["Password"] = self.module.params.get('share_parameters').get('password')
+ payload["FileName"] = self.module.params.get('share_parameters').get('file_name')
+ payload["IgnoreCertWarning"] = self.module.params.get('share_parameters').get('ignore_certificate_warning').capitalize()
+ if self.module.params.get('share_parameters').get('proxy_support') == "parameters_proxy":
+ payload["ProxySupport"] = PROXY_SUPPORT[self.module.params.get('share_parameters').get('proxy_support')]
+ payload["ProxyType"] = self.module.params.get('share_parameters').get('proxy_type').upper()
+ payload["ProxyServer"] = self.module.params.get('share_parameters').get('proxy_server')
+ payload["ProxyPort"] = str(self.module.params.get('share_parameters').get('proxy_port'))
+ if self.module.params.get('share_parameters').get('proxy_username') and self.module.params.get('share_parameters').get('proxy_password'):
+ payload["ProxyUname"] = self.module.params.get('share_parameters').get('proxy_username')
+ payload["ProxyPasswd"] = self.module.params.get('share_parameters').get('proxy_password')
+ return payload
+
    def test_network_share(self):
        """Validate that the share given in ``share_parameters`` is usable.

        For a local share the directory must exist and be writable; any
        other share type is verified by POSTing to the iDRAC test-share
        action. Exits the module with ``failed=True`` when validation fails.
        """
        payload = self.get_payload_details()
        # The test-share action does not accept a FileName key.
        del payload["FileName"]
        # Drop unset parameters so the iDRAC does not reject the payload.
        payload = {key: value for key, value in payload.items() if value is not None}
        if payload.get("ShareType") == "LOCAL":
            path = payload.get("ShareName")
            if not (os.path.exists(path)):
                self.module.exit_json(msg=INVALID_DIRECTORY_MSG.format(path=path), failed=True)
            if not os.access(path, os.W_OK):
                self.module.exit_json(msg=INSUFFICIENT_DIRECTORY_PERMISSION_MSG.format(path=path), failed=True)
        else:
            try:
                test_url = self.get_test_network_share_url()
                self.idrac.invoke_request(test_url, "POST", data=payload)
            except HTTPError as err:
                # Surface the first extended-info message from the iDRAC error body.
                filter_err = remove_key(json.load(err), regex_pattern=ODATA_REGEX)
                message_details = filter_err.get('error').get(MESSAGE_EXTENDED_INFO)[0]
                message = message_details.get('Message')
                self.module.exit_json(msg=message, error_info=filter_err, failed=True)
+
+ def get_test_network_share_url(self):
+ uri, error_msg = validate_and_get_first_resource_id_uri(
+ self.module, self.idrac, MANAGERS_URI)
+ if error_msg:
+ self.module.exit_json(msg=error_msg, failed=True)
+ resp = get_dynamic_uri(self.idrac, uri)
+ url = resp.get('Links', {}).get(OEM, {}).get(MANUFACTURER, {}).get(LC_SERVICE, {}).get(ODATA, {})
+ action_resp = get_dynamic_uri(self.idrac, url)
+ url = action_resp.get(ACTIONS, {}).get(TEST_SHARE, {}).get('target', {})
+ return url
+
+
class RunDiagnostics(Diagnostics):
    """Trigger a remote (ePSA) diagnostics job on the iDRAC and track it."""

    def execute(self):
        """Run diagnostics and, when job_wait is set, wait for completion.

        Returns ``(msg, job_details, None)``; the last element is always
        ``None`` because running alone produces no exported file path.
        """
        msg, job_details = None, None
        # When export is also requested, validate the share before running.
        if self.module.params.get('export'):
            self.test_network_share()
        self.__get_run_diagnostics_url()
        # Abort early if a diagnostics job is already scheduled or running.
        self.check_diagnostics_jobs()
        self.__validate_job_timeout()
        run_diagnostics_status = self.__run_diagnostics()
        job_status = self.__perform_job_wait(run_diagnostics_status)
        status = run_diagnostics_status.status_code
        if status in STATUS_SUCCESS and job_status.get('JobState') == "Completed":
            msg = SUCCESS_RUN_MSG
            job_details = job_status
        if status in STATUS_SUCCESS and job_status.get('JobState') in ["Scheduled", "Scheduling", "Running", "New"]:
            msg = RUNNING_RUN_MSG
            job_details = job_status
        return msg, job_details, None

    def __run_diagnostics(self):
        """POST the run-diagnostics action and return the HTTP response."""
        # Map module choices onto the Redfish enumeration values.
        reboot_job_types = {
            "graceful": "GracefulRebootWithoutForcedShutdown",
            "force": "GracefulRebootWithForcedShutdown",
            "power_cycle": "PowerCycle"
        }
        run_modes = {
            "express": "Express",
            "extended": "Extended",
            "long_run": "ExpressAndExtended"
        }
        payload = {}
        reboot_type = self.module.params.get('reboot_type')
        run_mode = self.module.params.get('run_mode')
        # Scheduled start/end times are only honoured for a power-cycle reboot.
        if reboot_type == "power_cycle":
            if self.module.params.get('scheduled_start_time'):
                start_time = self.__validate_time_format(self.module.params.get('scheduled_start_time'))
                if self.__validate_time(start_time):
                    payload["ScheduledStartTime"] = start_time
            if self.module.params.get('scheduled_end_time'):
                end_time = self.__validate_time_format(self.module.params.get('scheduled_end_time'))
                if self.__validate_time(end_time):
                    payload["UntilTime"] = end_time
            # UntilTime is kept only when the window is consistent.
            if (self.module.params.get('scheduled_start_time') and self.module.params.get('scheduled_end_time')
                    and self.__validate_end_time(start_time, end_time)):
                payload["UntilTime"] = end_time
        payload["RebootJobType"] = reboot_job_types.get(reboot_type)
        payload["RunMode"] = run_modes.get(run_mode)
        run_diagnostics_status = self.idrac.invoke_request(self.run_url, "POST", data=payload)
        return run_diagnostics_status

    def __get_run_diagnostics_url(self):
        """Resolve and store the run-diagnostics action URL on ``self.run_url``."""
        uri, error_msg = validate_and_get_first_resource_id_uri(
            self.module, self.idrac, MANAGERS_URI)
        if error_msg:
            self.module.exit_json(msg=error_msg, failed=True)
        resp = get_dynamic_uri(self.idrac, uri)
        url = resp.get('Links', {}).get(OEM, {}).get(MANUFACTURER, {}).get(LC_SERVICE, {}).get(ODATA, {})
        if url:
            action_resp = get_dynamic_uri(self.idrac, url)
            run_url = action_resp.get(ACTIONS, {}).get(RUN, {}).get('target', {})
            self.run_url = run_url
        else:
            # Firmware without the Dell LC service cannot run diagnostics.
            self.module.exit_json(msg=UNSUPPORTED_FIRMWARE_MSG, failed=True)

    def __validate_job_timeout(self):
        """Reject a non-positive job_wait_timeout when job_wait is enabled."""
        if self.module.params.get("job_wait") and self.module.params.get("job_wait_timeout") <= 0:
            self.module.exit_json(msg=TIMEOUT_NEGATIVE_OR_ZERO_MSG, failed=True)

    def __perform_job_wait(self, run_diagnostics_status):
        """Track the triggered job and return its odata-stripped details dict.

        When job_wait is disabled, a single GET of the job resource is
        returned instead of polling to completion.
        """
        job_dict = {}
        job_wait = self.module.params.get('job_wait')
        job_wait_timeout = self.module.params.get('job_wait_timeout')
        job_tracking_uri = run_diagnostics_status.headers.get("Location")
        if job_tracking_uri:
            job_id = job_tracking_uri.split("/")[-1]
            res_uri = validate_and_get_first_resource_id_uri(self.module, self.idrac, MANAGERS_URI)
            job_uri = f"{res_uri[0]}/{OEM}/{MANUFACTURER}/{JOBS}/{job_id}"
            if job_wait:
                job_failed, msg, job_dict, wait_time = idrac_redfish_job_tracking(self.idrac, job_uri,
                                                                                  max_job_wait_sec=job_wait_timeout,
                                                                                  sleep_interval_secs=1)
                job_dict = remove_key(job_dict, regex_pattern=ODATA_REGEX)
                # Timing out while the job is still running is reported as changed.
                if int(wait_time) >= int(job_wait_timeout):
                    self.module.exit_json(msg=WAIT_TIMEOUT_MSG.format(
                        job_wait_timeout), changed=True, job_status=job_dict)
                if job_failed:
                    self.module.exit_json(
                        msg=job_dict.get("Message"), job_status=job_dict, failed=True)
            else:
                job_resp = self.idrac.invoke_request(job_uri, 'GET')
                job_dict = job_resp.json_data
                job_dict = remove_key(job_dict, regex_pattern=ODATA_REGEX)
        return job_dict

    def __validate_time_format(self, time):
        """Normalise a timestamp to the offset-free format; exit when invalid."""
        try:
            datetime_obj = datetime.strptime(time, TIME_FORMAT_WITH_OFFSET)
        except ValueError:
            try:
                datetime_obj = datetime.strptime(time, TIME_FORMAT_WITHOUT_OFFSET)
            except ValueError:
                self.module.exit_json(failed=True, msg=INVALID_TIME.format(time))
        formatted_time = datetime_obj.strftime(TIME_FORMAT_WITHOUT_OFFSET)
        return formatted_time

    def __validate_time(self, time):
        """Exit when the given time is earlier than the current iDRAC time."""
        curr_idrac_time, offset = get_current_time(self.idrac)
        curr_idrac_time = datetime.strptime(curr_idrac_time, TIME_FORMAT_WITH_OFFSET)
        curr_idrac_time = curr_idrac_time.strftime(TIME_FORMAT_WITHOUT_OFFSET)
        currtime_obj = datetime.strptime(curr_idrac_time, TIME_FORMAT_WITHOUT_OFFSET)
        starttime_obj = datetime.strptime(time, TIME_FORMAT_WITHOUT_OFFSET)
        if starttime_obj < currtime_obj:
            self.module.exit_json(failed=True, msg=START_TIME)
        return True

    def __validate_end_time(self, start_time, end_time):
        """Exit when the scheduled end time precedes the start time."""
        starttime_obj = datetime.strptime(start_time, TIME_FORMAT_WITHOUT_OFFSET)
        endtime_obj = datetime.strptime(end_time, TIME_FORMAT_WITHOUT_OFFSET)
        if starttime_obj > endtime_obj:
            self.module.exit_json(failed=True, msg=END_START_TIME.format(end_time, start_time))
        return True

    def check_diagnostics_jobs(self):
        """Exit when a diagnostics job is already queued; honour check mode."""
        res_uri = validate_and_get_first_resource_id_uri(self.module, self.idrac, MANAGERS_URI)
        job_uri = f"{res_uri[0]}/{OEM}/{MANUFACTURER}/{JOBS}{JOBS_EXPAND}"
        job_resp = self.idrac.invoke_request(job_uri, "GET")
        job_list = job_resp.json_data.get('Members', [])
        job_id = ""
        for jb in job_list:
            if jb.get("JobType") == "RemoteDiagnostics" and jb.get("JobState") in ["Scheduled", "Running", "Starting", "New"]:
                job_id = jb['Id']
                job_dict = remove_key(jb, regex_pattern=ODATA_REGEX)
                break
        if self.module.check_mode and job_id:
            self.module.exit_json(msg=ALREADY_RUN_MSG, job_details=job_dict, skipped=True)
        if self.module.check_mode and not job_id:
            self.module.exit_json(msg=CHANGES_FOUND_MSG, changed=True)
        if job_id:
            self.module.exit_json(msg=ALREADY_RUN_MSG, job_details=job_dict, skipped=True)
+
+
class ExportDiagnostics(Diagnostics):
    """Export a previously generated diagnostics (ePSA) report.

    The report can be written to a local directory on the controller node or
    pushed by the iDRAC to an NFS, CIFS, HTTP, or HTTPS share.
    """

    def execute(self):
        """Perform the export and return ``(msg, job_details, file_path)``."""
        self.test_network_share()
        self.__get_export_diagnostics_url()
        if self.module.check_mode:
            self.perform_check_mode()
        job_status = {}
        # Initialise the returned values up front: the original code left
        # them unbound when the status code was not a success, which raised
        # UnboundLocalError on the final return statement.
        msg, job_details = None, None
        share_type = self.module.params.get('share_parameters').get('share_type')
        share_type_methods = {
            "local": self.__export_diagnostics_local,
            "http": self.__export_diagnostics_http,
            "https": self.__export_diagnostics_http,
            "cifs": self.__export_diagnostics_cifs,
            "nfs": self.__export_diagnostics_nfs
        }
        export_diagnostics_status = share_type_methods[share_type]()
        if share_type != "local":
            job_status = self.get_job_status(export_diagnostics_status)
        status = export_diagnostics_status.status_code
        diagnostics_file_path = f"{self.share_name}/{self.file_name}"
        if status in STATUS_SUCCESS:
            msg = SUCCESS_EXPORT_MSG
            job_details = job_status
        return msg, job_details, diagnostics_file_path

    def __export_diagnostics_local(self):
        """Export to a directory on the controller node; return the response."""
        payload = {}
        payload["ShareType"] = "Local"
        file_path = self.module.params.get('share_parameters').get('share_name')
        self.share_name = file_path.rstrip("/")
        diagnostics_status = self.__export_diagnostics(payload)
        diagnostics_file_name = payload.get("FileName")
        diagnostics_data = self.idrac.invoke_request(diagnostics_status.headers.get("Location"), "GET")
        file_name = os.path.join(file_path, diagnostics_file_name)
        # The iDRAC returns CRLF line endings; normalise to LF when saving.
        with open(file_name, "w") as fp:
            fp.write(diagnostics_data.body.decode().replace("\r", ""))
        return diagnostics_status

    def __export_diagnostics_http(self):
        """Export to an HTTP or HTTPS share; return the response."""
        payload = self.get_payload_details()
        export_status = self.__export_diagnostics(payload)
        share = self.module.params.get('share_parameters')
        # Bracket IPv6 addresses so the displayed path is a valid URL.
        ip = config_ipv6(share.get('ip_address'))
        self.share_name = f"{share.get('share_type')}://{ip}/{share.get('share_name').strip('/')}"
        return export_status

    def __export_diagnostics_cifs(self):
        """Export to a CIFS share; return the response."""
        payload = self.get_payload_details()
        if self.module.params.get('share_parameters').get('workgroup'):
            payload["Workgroup"] = self.module.params.get('share_parameters').get('workgroup')
        export_status = self.__export_diagnostics(payload)
        share_name = self.module.params.get('share_parameters').get('share_name').replace("\\", "/")
        self.share_name = f"//{self.module.params.get('share_parameters').get('ip_address')}/{share_name.strip('/')}"
        return export_status

    def __export_diagnostics_nfs(self):
        """Export to an NFS share; return the response."""
        payload = self.get_payload_details()
        # NFS shares do not authenticate with a username/password pair.
        del payload["UserName"], payload["Password"]
        export_status = self.__export_diagnostics(payload)
        share = self.module.params.get('share_parameters')
        self.share_name = f"{share.get('ip_address')}:/{share.get('share_name').strip('/')}"
        return export_status

    def __get_export_diagnostics_url(self):
        """Resolve and store the export action URL on ``self.export_url``."""
        uri, error_msg = validate_and_get_first_resource_id_uri(
            self.module, self.idrac, MANAGERS_URI)
        if error_msg:
            self.module.exit_json(msg=error_msg, failed=True)
        resp = get_dynamic_uri(self.idrac, uri)
        url = resp.get('Links', {}).get(OEM, {}).get(MANUFACTURER, {}).get(LC_SERVICE, {}).get(ODATA, {})
        if url:
            action_resp = get_dynamic_uri(self.idrac, url)
            export_url = action_resp.get(ACTIONS, {}).get(EXPORT, {}).get('target', {})
            self.export_url = export_url
        else:
            # Firmware without the Dell LC service cannot export diagnostics.
            self.module.exit_json(msg=UNSUPPORTED_FIRMWARE_MSG, failed=True)

    def __export_diagnostics(self, payload):
        """POST the export action, generating a default file name if needed."""
        diagnostics_file_name = self.module.params.get('share_parameters').get('file_name')
        if not diagnostics_file_name:
            # Default name: <expanded idrac ip with ':' -> '.'>_<timestamp>.txt
            now = datetime.now()
            hostname = self.module.params.get('idrac_ip')
            hostname = self.expand_ipv6(hostname)
            hostname = hostname.replace(":", ".")
            diagnostics_file_name = f"{hostname}_{now.strftime(TIME_FORMAT_FILE)}.txt"
        payload["FileName"] = diagnostics_file_name
        self.file_name = diagnostics_file_name
        diagnostics_status = self.idrac.invoke_request(self.export_url, "POST", data=payload)
        return diagnostics_status

    def get_job_status(self, export_diagnostics_status):
        """Track the export job to completion; exit with details on failure."""
        res_uri = validate_and_get_first_resource_id_uri(self.module, self.idrac, MANAGERS_URI)
        job_tracking_uri = export_diagnostics_status.headers.get("Location")
        job_id = job_tracking_uri.split("/")[-1]
        job_uri = f"{res_uri[0]}/{OEM}/{MANUFACTURER}/{JOBS}/{job_id}"
        job_failed, msg, job_dict, wait_time = idrac_redfish_job_tracking(self.idrac, job_uri)
        job_dict = remove_key(job_dict, regex_pattern=ODATA_REGEX)
        if job_failed:
            self.module.exit_json(msg=job_dict.get('Message'), failed=True, job_details=job_dict)
        return job_dict

    def perform_check_mode(self):
        """Check-mode probe: a local export attempt shows whether a report exists."""
        try:
            payload = {}
            payload['ShareType'] = 'Local'
            export_status = self.idrac.invoke_request(self.export_url, "POST", data=payload)
            if export_status.status_code in STATUS_SUCCESS:
                self.module.exit_json(msg=CHANGES_FOUND_MSG, changed=True)
        except HTTPError as err:
            filter_err = remove_key(json.load(err), regex_pattern=ODATA_REGEX)
            message_details = filter_err.get('error').get(MESSAGE_EXTENDED_INFO)[0]
            message_id = message_details.get('MessageId')
            # SYS099: no diagnostics report exists on the iDRAC.
            if 'SYS099' in message_id:
                self.module.exit_json(msg=NO_FILE, skipped=True)

    def expand_ipv6(self, ip):
        """Expand a compressed IPv6 address to its full eight-group form.

        Input without a double colon (including IPv4 addresses, which have
        no ':' at all) is returned with each colon-separated part
        zero-padded to four characters, i.e. IPv4 passes through unchanged.
        """
        sections = ip.split(':')
        num_sections = len(sections)
        double_colon_index = sections.index('') if '' in sections else -1
        if double_colon_index != -1:
            missing_sections = 8 - num_sections + 1
            sections[double_colon_index:double_colon_index + 1] = ['0000'] * missing_sections
        sections = [section.zfill(4) for section in sections]
        expanded_ip = ':'.join(sections)
        return expanded_ip
+
+
class RunAndExportDiagnostics:
    """Compose the run and export operations into a single workflow."""

    def __init__(self, idrac, module):
        self.module = module
        self.run = RunDiagnostics(idrac, module)
        self.export = ExportDiagnostics(idrac, module)

    def execute(self):
        """Run diagnostics, then export the report when job_wait is enabled."""
        run_msg, run_job, run_path = self.run.execute()
        if not self.module.params.get("job_wait"):
            # Without waiting there is nothing to export yet; report the run.
            return run_msg, run_job, run_path
        unused_msg, export_job, export_path = self.export.execute()
        return SUCCESS_RUN_AND_EXPORT_MSG, export_job, export_path
+
+
class DiagnosticsType:
    """Factory that selects the diagnostics operation requested by the user."""

    _diagnostics_classes = {
        "run": RunDiagnostics,
        "export": ExportDiagnostics,
        "run_and_export": RunAndExportDiagnostics
    }

    @staticmethod
    def diagnostics_operation(idrac, module):
        """Instantiate the handler matching the run/export flags, or skip."""
        wants_run = module.params.get("run")
        wants_export = module.params.get("export")
        if wants_run and wants_export:
            selected = "run_and_export"
        elif wants_run:
            selected = "run"
        elif wants_export:
            selected = "export"
        else:
            # Neither operation requested: nothing to do.
            module.exit_json(msg=NO_OPERATION_SKIP_MSG, skipped=True)
        return DiagnosticsType._diagnostics_classes[selected](idrac, module)
+
+
def main():
    """Module entry point: build the AnsibleModule and run the operation."""
    specs = get_argument_spec()
    specs.update(idrac_auth_params)
    module = AnsibleModule(
        argument_spec=specs,
        required_one_of=[["run", "export"]],
        required_if=[
            ["run", True, ("reboot_type", "run_mode",)],
            ["export", True, ("share_parameters",)]
        ],
        supports_check_mode=True
    )

    try:
        with iDRACRedfishAPI(module.params) as idrac:
            diagnostics_obj = DiagnosticsType.diagnostics_operation(idrac, module)
            msg, job_status, file_path = diagnostics_obj.execute()
            # file_path is only produced by export operations.
            if file_path is None:
                module.exit_json(msg=msg, changed=True, job_details=job_status)
            module.exit_json(msg=msg, changed=True, job_details=job_status, diagnostics_file_path=file_path)
    except HTTPError as err:
        # Map known iDRAC message IDs to friendlier skip results before
        # falling back to a generic failure.
        filter_err = remove_key(json.load(err), regex_pattern=ODATA_REGEX)
        message_details = filter_err.get('error').get(MESSAGE_EXTENDED_INFO)[0]
        message_id = message_details.get('MessageId')
        if 'SYS099' in message_id:
            # SYS099: no diagnostics report available to export.
            module.exit_json(msg=NO_FILE, skipped=True)
        if 'SYS098' in message_id:
            module.exit_json(msg=message_details.get('Message'), skipped=True)
        module.exit_json(msg=str(err), error_info=filter_err, failed=True)
    except URLError as err:
        module.exit_json(msg=str(err), unreachable=True)
    except (OSError, ValueError, SSLValidationError, ConnectionError, TypeError) as e:
        module.exit_json(msg=str(e), failed=True)
+
+
def get_argument_spec():
    """Return the Ansible argument spec for the idrac_diagnostics module."""
    share_options = {
        "share_type": {
            "type": 'str',
            "default": 'local',
            "choices": ['local', 'nfs', 'cifs', 'http', 'https']
        },
        "proxy_type": {
            "type": 'str',
            "default": 'http',
            "choices": ['http', 'socks']
        },
        "username": {"type": 'str'},
        "password": {"type": 'str', "no_log": True},
        "proxy_port": {"type": 'int', "default": 80},
        "file_name": {"type": 'str'},
        "ignore_certificate_warning": {
            "type": 'str',
            "default": "off",
            "choices": ["off", "on"]
        },
        "ip_address": {"type": 'str'},
        "proxy_server": {"type": 'str'},
        "workgroup": {"type": 'str'},
        "proxy_support": {
            "type": 'str',
            "default": "off",
            "choices": ["off", "default_proxy", "parameters_proxy"]
        },
        "share_name": {"type": 'str'},
        "proxy_username": {"type": 'str'},
        "proxy_password": {"type": 'str', "no_log": True}
    }
    share_parameters = {
        "type": 'dict',
        "options": share_options,
        "required_together": [
            ("username", "password"),
            ("proxy_username", "proxy_password")
        ],
        "required_if": [
            ["share_type", "local", ["share_name"]],
            ["share_type", "nfs", ["ip_address", "share_name"]],
            ["share_type", "cifs", ["ip_address", "share_name", "username", "password"]],
            ["share_type", "http", ["ip_address", "share_name"]],
            ["share_type", "https", ["ip_address", "share_name"]],
            ["proxy_support", "parameters_proxy", ["proxy_server"]]
        ],
    }
    spec = {
        "run": {"type": 'bool'},
        "export": {"type": 'bool'},
        "run_mode": {
            "type": 'str',
            "default": 'express',
            "choices": ['express', 'extended', 'long_run']
        },
        "reboot_type": {
            "type": 'str',
            "default": 'graceful',
            "choices": ['force', 'graceful', 'power_cycle']
        },
        "scheduled_start_time": {"type": 'str'},
        "scheduled_end_time": {"type": 'str'},
        "job_wait": {"type": 'bool', "default": True},
        "job_wait_timeout": {"type": 'int', "default": 1200},
        "share_parameters": share_parameters,
        "resource_id": {"type": 'str'}
    }
    return spec
+
+
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_reset.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_reset.py
index 2c28c9a5f..940f86dc2 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_reset.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_reset.py
@@ -3,64 +3,152 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.1.0
-# Copyright (C) 2018-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.2.0
+# Copyright (C) 2018-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-DOCUMENTATION = """
+DOCUMENTATION = r'''
---
module: idrac_reset
-short_description: Reset iDRAC
+short_description: Factory reset the iDRACs
version_added: "2.1.0"
description:
- - This module resets iDRAC.
- - "iDRAC is not accessible for some time after running this module. It is recommended to wait for some time,
- before trying to connect to iDRAC."
+ - This module resets the iDRAC to factory default settings.
extends_documentation_fragment:
- dellemc.openmanage.idrac_auth_options
+options:
+ reset_to_default:
+ type: str
+ description:
+    - If this value is not set, the default behaviour is to restart the iDRAC.
+ - C(All) Discards all settings and reset to default credentials.
+ - C(ResetAllWithRootDefaults) Discards all settings and reset the default username to root and password to the shipping value.
+ - C(Default) Discards all settings, but preserves user and network settings.
+    - C(CustomDefaults) All configuration is set to custom defaults. This option is supported on firmware version 7.00.00.00 and newer versions.
+ choices: ['Default', 'All', 'ResetAllWithRootDefaults', 'CustomDefaults']
+ version_added: 9.2.0
+ custom_defaults_file:
+ description:
+ - Name of the custom default configuration file in the XML format.
+ - This option is applicable when I(reset_to_default) is C(CustomDefaults).
+ - I(custom_defaults_file) is mutually exclusive with I(custom_defaults_buffer).
+ type: str
+ version_added: 9.2.0
+ custom_defaults_buffer:
+ description:
+ - This parameter provides the option to import the buffer input in XML format as a custom default configuration.
+ - This option is applicable when I(reset_to_default) is C(CustomDefaults).
+ - I(custom_defaults_buffer) is mutually exclusive with I(custom_defaults_file).
+ type: str
+ version_added: 9.2.0
+ wait_for_idrac:
+ description:
+ - This parameter provides the option to wait for the iDRAC to reset and lifecycle controller status to be ready.
+ type: bool
+ default: true
+ version_added: 9.2.0
+ job_wait_timeout:
+ description:
+ - Time in seconds to wait for job completion.
+ - This is applicable when I(job_wait) is C(true).
+ type: int
+ default: 600
+ version_added: 9.2.0
+ force_reset:
+ description:
+ - This parameter provides the option to force reset the iDRAC without checking the iDRAC lifecycle controller status.
+ - This option is applicable only for iDRAC9.
+ type: bool
+ default: false
+ version_added: 9.2.0
requirements:
- - "omsdk >= 1.2.488"
- "python >= 3.9.6"
author:
- "Felix Stephen (@felixs88)"
- "Anooja Vardhineni (@anooja-vardhineni)"
+ - "Lovepreet Singh (@singh-lovepreet1)"
notes:
- Run this module from a system that has direct access to Dell iDRAC.
- This module supports both IPv4 and IPv6 address for I(idrac_ip).
- This module supports C(check_mode).
-"""
+ - If reset_to_default option is not specified, then this module triggers a graceful restart.
+ - This module skips the execution if reset options are not supported by the iDRAC.
+'''
-EXAMPLES = """
+EXAMPLES = r'''
---
-- name: Reset iDRAC
+- name: Reset the iDRAC to all and wait till the iDRAC is accessible.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "All"
+
+- name: Reset the iDRAC to default and do not wait till the iDRAC is accessible.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "Default"
+ wait_for_idrac: false
+
+- name: Force reset the iDRAC to default.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "Default"
+ force_reset: true
+
+- name: Gracefully restart the iDRAC.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+
+- name: Reset the iDRAC to custom defaults XML and do not wait till the iDRAC is accessible.
+ dellemc.openmanage.idrac_reset:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "CustomDefaults"
+ custom_defaults_file: "/path/to/custom_defaults.xml"
+
+- name: Reset the iDRAC to custom defaults buffer input and do not wait till the iDRAC is accessible.
dellemc.openmanage.idrac_reset:
- idrac_ip: "192.168.0.1"
- idrac_user: "user_name"
- idrac_password: "user_password"
- idrac_port: 443
- ca_path: "/path/to/ca_cert.pem"
-"""
+ idrac_ip: "192.168.0.1"
+ idrac_user: "user_name"
+ idrac_password: "user_password"
+ ca_path: "/path/to/ca_cert.pem"
+ reset_to_default: "CustomDefaults"
+ custom_defaults_buffer: "<SystemConfiguration Model=\"PowerEdge R7525\" ServiceTag=\"ABCD123\">\n<Component FQDD=\"iDRAC.Embedded.1\">\n
+ <Attribute Name=\"IPMILan.1#Enable\">Disabled</Attribute>\n </Component>\n\n</SystemConfiguration>"
+'''
RETURN = r'''
---
msg:
+ type: str
description: Status of the iDRAC reset operation.
returned: always
- type: str
sample: "Successfully performed iDRAC reset."
reset_status:
- description: Details of iDRAC reset operation.
- returned: always
- type: dict
- sample: {
- "idracreset": {
+ type: dict
+ description: Details of iDRAC reset operation.
+ returned: reset operation is triggered.
+ sample: {
+ "idracreset": {
"Data": {
"StatusCode": 204
},
@@ -92,41 +180,382 @@ error_info:
}
'''
-
+import os
import json
-from ansible_collections.dellemc.openmanage.plugins.module_utils.dellemc_idrac import iDRACConnection, idrac_auth_params
+import time
+from urllib.error import HTTPError, URLError
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
from ansible.module_utils.basic import AnsibleModule
-from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import (
+ get_idrac_firmware_version, remove_key, get_dynamic_uri, validate_and_get_first_resource_id_uri, idrac_redfish_job_tracking)
+
+
# Redfish endpoints and OEM identifiers.
MANAGERS_URI = "/redfish/v1/Managers"
OEM = "Oem"
MANUFACTURER = "Dell"
ACTIONS = "Actions"
# Polling configuration used while waiting for the iDRAC to come back.
IDRAC_RESET_RETRIES = 50
LC_STATUS_CHECK_SLEEP = 30
IDRAC_JOB_URI = "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/{job_id}"
# User-facing status and error messages.
RESET_TO_DEFAULT_ERROR = "{reset_to_default} is not supported. The supported values are {supported_values}. Enter the valid values and retry the operation."
RESET_TO_DEFAULT_ERROR_MSG = "{reset_to_default} is not supported."
CUSTOM_ERROR = "{reset_to_default} is not supported on this firmware version of iDRAC. The supported values are {supported_values}. \
Enter the valid values and retry the operation."
IDRAC_RESET_RESTART_SUCCESS_MSG = "iDRAC restart operation completed successfully."
IDRAC_RESET_SUCCESS_MSG = "Successfully performed iDRAC reset."
IDRAC_RESET_RESET_TRIGGER_MSG = "iDRAC reset operation triggered successfully."
IDRAC_RESET_RESTART_TRIGGER_MSG = "iDRAC restart operation triggered successfully."
INVALID_DIRECTORY_MSG = "Provided directory path '{path}' is invalid."
FAILED_RESET_MSG = "Failed to perform the reset operation."
RESET_UNTRACK = "iDRAC reset is in progress. Changes will apply once the iDRAC reset operation is successfully completed."
TIMEOUT_NEGATIVE_OR_ZERO_MSG = "The value of `job_wait_timeout` parameter cannot be negative or zero. Enter the valid value and retry the operation."
INVALID_FILE_MSG = "File extension is invalid. Supported extension for 'custom_default_file' is: .xml."
LC_STATUS_MSG = "Lifecycle controller status check is {lc_status} after {retries} number of retries, Exiting.."
INSUFFICIENT_DIRECTORY_PERMISSION_MSG = "Provided directory path '{path}' is not writable. Please check if the directory has appropriate permissions."
UNSUPPORTED_LC_STATUS_MSG = "Lifecycle controller status check is not supported."
# CustomDefaults reset requires iDRAC firmware 7.00.00 or later.
MINIMUM_SUPPORTED_FIRMWARE_VERSION = "7.00.00"
CHANGES_NOT_FOUND = "No changes found to commit!"
CHANGES_FOUND = "Changes found to commit!"
ODATA_ID = "@odata.id"
ODATA_REGEX = "(.*?)@odata"
ATTRIBUTE = "</Attribute>"
SUCCESS_STATUS = "Success"
FAILED_STATUS = "Failed"
# HTTP status codes treated as success / as handled errors.
STATUS_SUCCESS = [200, 202, 204]
ERR_STATUS_CODE = [400, 404]
# Reset options that restore default credentials (a later 401 is expected).
PASSWORD_CHANGE_OPTIONS = ['All', 'ResetAllWithRootDefaults']
RESET_KEY = "Oem.#DellManager.ResetToDefaults"
GRACEFUL_RESTART_KEY = "#Manager.Reset"
+
+
class Validation():
    """Input and capability validation helpers for the iDRAC reset module."""

    def __init__(self, idrac, module):
        self.idrac = idrac
        self.module = module
        self.base_uri = self.get_base_uri()

    def get_base_uri(self):
        """Return the first manager resource URI, or exit on lookup failure."""
        uri, error_msg = validate_and_get_first_resource_id_uri(
            self.module, self.idrac, MANAGERS_URI)
        if error_msg:
            self.module.exit_json(msg=error_msg, failed=True)
        return uri
-def run_idrac_reset(idrac, module):
- if module.check_mode:
- msg = {'Status': 'Success', 'Message': 'Changes found to commit!', 'changes_applicable': True}
- else:
- idrac.use_redfish = True
- msg = idrac.config_mgr.reset_idrac()
- return msg
    def validate_reset_options(self, api_key):
        """Check whether the requested reset_to_default value is advertised.

        Returns ``(allowed_values, is_valid)``; ``allowed_values`` is the
        ResetType@Redfish.AllowableValues list, or None when the action
        itself is missing from the manager resource.
        """
        res = self.idrac.invoke_request(self.base_uri, "GET")
        reset_to_default = self.module.params.get('reset_to_default')
        # api_key is e.g. "Oem.#DellManager.ResetToDefaults" -> two lookups.
        key_list = api_key.split(".", 1)
        is_valid = True
        allowed_values = None
        if key_list[0] in res.json_data["Actions"] and key_list[1] in res.json_data["Actions"][key_list[0]]:
            reset_to_defaults_val = res.json_data["Actions"][key_list[0]][key_list[1]]
            reset_type_values = reset_to_defaults_val["ResetType@Redfish.AllowableValues"]
            allowed_values = reset_type_values
            if reset_to_default not in reset_type_values:
                is_valid = False
        else:
            is_valid = False
        return allowed_values, is_valid
+
    def validate_graceful_restart_option(self, api_key):
        """Return True when the manager advertises the GracefulRestart reset type."""
        res = self.idrac.invoke_request(self.base_uri, "GET")
        is_valid = True
        if api_key in res.json_data["Actions"]:
            reset_to_defaults_val = res.json_data["Actions"][api_key]
            reset_type_values = reset_to_defaults_val["ResetType@Redfish.AllowableValues"]
            if "GracefulRestart" not in reset_type_values:
                is_valid = False
        else:
            is_valid = False
        return is_valid
+
    def validate_job_timeout(self):
        """Exit when wait_for_idrac is set with a non-positive job_wait_timeout."""
        if self.module.params.get("wait_for_idrac") and self.module.params.get("job_wait_timeout") <= 0:
            self.module.exit_json(msg=TIMEOUT_NEGATIVE_OR_ZERO_MSG, failed=True)
+
    def validate_path(self, file_path):
        """Exit unless ``file_path`` exists and is writable."""
        if not (os.path.exists(file_path)):
            self.module.exit_json(msg=INVALID_DIRECTORY_MSG.format(path=file_path), failed=True)
        if not os.access(file_path, os.W_OK):
            self.module.exit_json(msg=INSUFFICIENT_DIRECTORY_PERMISSION_MSG.format(path=file_path), failed=True)
+
    def validate_file_format(self, file_name):
        """Exit unless the custom defaults file has a .xml extension."""
        if not (file_name.endswith(".xml")):
            self.module.exit_json(msg=INVALID_FILE_MSG, failed=True)
+
    def validate_custom_option(self, reset_to_default=None, allowed_choices=None):
        """Return True when the firmware exposes CustomDefaultsDownloadURI.

        A 400/404 from the download URI means custom defaults are not
        supported on this firmware; the module exits as skipped in that case.
        """
        url = None
        resp = get_dynamic_uri(self.idrac, self.base_uri, OEM)
        if resp:
            url = resp.get(MANUFACTURER, {}).get('CustomDefaultsDownloadURI', {})
        try:
            if url:
                self.idrac.invoke_request(url, "GET")
                return True
            return False
        except HTTPError as err:
            if err.code in ERR_STATUS_CODE:
                self.module.exit_json(msg=RESET_TO_DEFAULT_ERROR.format(reset_to_default=reset_to_default, supported_values=allowed_choices), skipped=True)
+
+
class FactoryReset():
    """Orchestrates the iDRAC restart / factory-reset workflow."""

    def __init__(self, idrac, module, allowed_choices):
        self.idrac = idrac
        self.module = module
        self.allowed_choices = allowed_choices
        self.reset_to_default = self.module.params.get('reset_to_default')
        self.force_reset = self.module.params.get('force_reset')
        self.wait_for_idrac = self.module.params.get('wait_for_idrac')
        self.validate_obj = Validation(self.idrac, self.module)
        self.uri = self.validate_obj.base_uri
        self.idrac_firmware_version = get_idrac_firmware_version(self.idrac)
+
    def execute(self):
        """Validate inputs, dispatch the requested reset, return (msg, job)."""
        msg_res, job_res = None, None
        self.validate_obj.validate_job_timeout()
        is_idrac9 = self.is_check_idrac_latest()
        # Pre-iDRAC9 firmware advertises fewer reset options; bail out early
        # when the requested option is not supported.
        if not is_idrac9 and self.reset_to_default:
            allowed_values, is_valid_option = self.validate_obj.validate_reset_options(RESET_KEY)
            if self.module.check_mode and not is_valid_option:
                self.module.exit_json(msg=CHANGES_NOT_FOUND)
            if not is_valid_option:
                self.module.exit_json(msg=RESET_TO_DEFAULT_ERROR_MSG.format(reset_to_default=self.reset_to_default),
                                      skipped=True)
        if self.module.check_mode:
            self.check_mode_output(is_idrac9)
        # Unless forced, require the lifecycle controller to be Ready first.
        if is_idrac9 and not self.force_reset:
            self.check_lcstatus(post_op=False)
        # Dispatch table: the three plain factory options share one handler;
        # reset_to_default=None (stringified) means a graceful restart.
        reset_status_mapping = {key: self.reset_to_default_mapped for key in ['Default', 'All', 'ResetAllWithRootDefaults']}
        reset_status_mapping.update({
            'CustomDefaults': self.reset_custom_defaults,
            'None': self.graceful_restart
        })
        msg_res, job_res = reset_status_mapping[str(self.reset_to_default)]()
        if is_idrac9 and self.wait_for_idrac:
            self.check_lcstatus()
        return msg_res, job_res
+
    def check_mode_output(self, is_idrac9):
        """Report check-mode status and exit without changing anything."""
        # CustomDefaults requires firmware >= MINIMUM_SUPPORTED_FIRMWARE_VERSION.
        if is_idrac9 and self.reset_to_default == 'CustomDefaults' and LooseVersion(self.idrac_firmware_version) < MINIMUM_SUPPORTED_FIRMWARE_VERSION:
            self.module.exit_json(msg=CHANGES_NOT_FOUND)
        if self.reset_to_default:
            allowed_values, is_valid_option = self.validate_obj.validate_reset_options(RESET_KEY)
        else:
            is_valid_option = self.validate_obj.validate_graceful_restart_option(GRACEFUL_RESTART_KEY)
        custom_default_file = self.module.params.get('custom_defaults_file')
        custom_default_buffer = self.module.params.get('custom_defaults_buffer')
        if is_valid_option:
            self.module.exit_json(msg=CHANGES_FOUND, changed=True)
        elif self.reset_to_default and self.reset_to_default == 'CustomDefaults' and (custom_default_file or custom_default_buffer):
            self.module.exit_json(msg=CHANGES_FOUND, changed=True)
        else:
            self.module.exit_json(msg=CHANGES_NOT_FOUND)
+
+ def is_check_idrac_latest(self):
+ if LooseVersion(self.idrac_firmware_version) >= '3.0':
+ return True
+
    def check_lcstatus(self, post_op=True):
        """Poll the Lifecycle Controller until it reports 'Ready'.

        Skipped after a credential-resetting operation when the stored
        credentials already failed with 401, since further polling with the
        old credentials cannot succeed.
        """
        if self.reset_to_default in PASSWORD_CHANGE_OPTIONS and post_op and self.staus_code_after_wait == 401:
            return
        lc_status_dict = {}
        lc_status_dict['LCStatus'] = ""
        retry_count = 1
        resp = get_dynamic_uri(self.idrac, self.uri, "Links")
        url = resp.get(OEM, {}).get(MANUFACTURER, {}).get('DellLCService', {}).get(ODATA_ID, {})
        if url:
            action_resp = get_dynamic_uri(self.idrac, url)
            lc_url = action_resp.get(ACTIONS, {}).get('#DellLCService.GetRemoteServicesAPIStatus', {}).get('target', {})
        else:
            self.module.exit_json(msg=UNSUPPORTED_LC_STATUS_MSG, failed=True)
        while retry_count < IDRAC_RESET_RETRIES:
            try:
                lcstatus = self.idrac.invoke_request(lc_url, "POST", data="{}", dump=False)
                lcstatus_data = lcstatus.json_data.get('LCStatus')
                lc_status_dict['LCStatus'] = lcstatus_data
                if lc_status_dict.get('LCStatus') == 'Ready':
                    break
                time.sleep(10)
                retry_count = retry_count + 1
            except URLError:
                # The iDRAC is still rebooting; wait and retry.
                time.sleep(10)
                retry_count = retry_count + 1
                if retry_count == IDRAC_RESET_RETRIES:
                    self.module.exit_json(msg=LC_STATUS_MSG.format(lc_status='unreachable', retries=IDRAC_RESET_RETRIES), unreachable=True)

        if retry_count == IDRAC_RESET_RETRIES and lc_status_dict.get('LCStatus') != "Ready":
            self.module.exit_json(msg=LC_STATUS_MSG.format(lc_status=lc_status_dict.get('LCStatus'), retries=retry_count), failed=True)
+
+ def create_output(self, status):
+ result = {}
+ tmp_res = {}
+ result['idracreset'] = {}
+ result['idracreset']['Data'] = {'StatusCode': status}
+ result['idracreset']['StatusCode'] = status
+ track_failed, wait_msg = None, None
+ self.staus_code_after_wait = 202
+ if status in STATUS_SUCCESS:
+ if self.wait_for_idrac:
+ track_failed, status_code, wait_msg = self.wait_for_port_open()
+ self.staus_code_after_wait = status_code
+ if track_failed:
+ self.module.exit_json(msg=wait_msg, changed=True)
+ tmp_res['msg'] = IDRAC_RESET_SUCCESS_MSG if self.wait_for_idrac else IDRAC_RESET_RESET_TRIGGER_MSG
+ tmp_res['changed'] = True
+ result['idracreset']['Message'] = IDRAC_RESET_SUCCESS_MSG if self.wait_for_idrac else IDRAC_RESET_RESET_TRIGGER_MSG
+ result['idracreset']['Status'] = 'Success'
+ result['idracreset']['retVal'] = True
+ else:
+ tmp_res['msg'] = FAILED_RESET_MSG
+ tmp_res['changed'] = False
+ result['idracreset']['Message'] = FAILED_RESET_MSG
+ result['idracreset']['Status'] = 'FAILED'
+ result['idracreset']['retVal'] = False
+ if self.reset_to_default:
+ result = None
+ return tmp_res, result
+
+ def perform_operation(self, payload):
+ tmp_res, res = None, None
+ url = None
+ resp = get_dynamic_uri(self.idrac, self.uri, ACTIONS)
+ if resp:
+ url = resp.get(OEM, {}).get('#DellManager.ResetToDefaults', {}).get('target', {})
+ run_reset_status = self.idrac.invoke_request(url, "POST", data=payload)
+ status = run_reset_status.status_code
+ tmp_res, res = self.create_output(status)
+ return tmp_res, res
+
+ def upload_cd_content(self, data):
+ payload = {"CustomDefaults": data}
+ job_wait_timeout = self.module.params.get('job_wait_timeout')
+ url = None
+ resp = get_dynamic_uri(self.idrac, self.uri, ACTIONS)
+ if resp:
+ url = resp.get(OEM, {}).get('#DellManager.SetCustomDefaults', {}).get('target', {})
+ job_resp = self.idrac.invoke_request(url, "POST", data=payload)
+ if (job_tracking_uri := job_resp.headers.get("Location")):
+ job_id = job_tracking_uri.split("/")[-1]
+ job_uri = IDRAC_JOB_URI.format(job_id=job_id)
+ job_failed, msg, job_dict, wait_time = idrac_redfish_job_tracking(self.idrac, job_uri,
+ max_job_wait_sec=job_wait_timeout,
+ sleep_interval_secs=1)
+ job_dict = remove_key(job_dict, regex_pattern='(.*?)@odata')
+ if job_failed:
+ self.module.exit_json(msg=job_dict.get("Message"), job_status=job_dict, failed=True)
+
+ def wait_for_port_open(self, interval=45):
+ timeout_wait = self.module.params.get('job_wait_timeout')
+ time.sleep(interval)
+ msg = RESET_UNTRACK
+ wait = timeout_wait
+ track_failed = True
+ status_code = 503
+ while int(wait) > 0:
+ try:
+ self.idrac.invoke_request(MANAGERS_URI, 'GET')
+ time.sleep(interval)
+ msg = IDRAC_RESET_SUCCESS_MSG
+ track_failed = False
+ status_code = 200
+ break
+ except HTTPError as err:
+ status_code = err.code
+ if status_code == 401:
+ time.sleep(interval // 2)
+ msg = IDRAC_RESET_SUCCESS_MSG
+ track_failed = False
+ break
+ except Exception:
+ time.sleep(interval)
+ wait = wait - interval
+ return track_failed, status_code, msg
+
+ def reset_to_default_mapped(self):
+ payload = {"ResetType": self.reset_to_default}
+ self.allowed_choices, is_valid_option = self.validate_obj.validate_reset_options(RESET_KEY)
+ if not is_valid_option:
+ self.module.exit_json(msg=RESET_TO_DEFAULT_ERROR.format(reset_to_default=self.reset_to_default, supported_values=self.allowed_choices),
+ skipped=True)
+ return self.perform_operation(payload)
+
+ def get_xml_content(self, file_path):
+ with open(file_path, 'r') as file:
+ xml_content = file.read()
+ return xml_content
+
+ def reset_custom_defaults(self):
+ self.allowed_choices, is_valid_option = self.validate_obj.validate_reset_options(RESET_KEY)
+ if LooseVersion(self.idrac_firmware_version) < MINIMUM_SUPPORTED_FIRMWARE_VERSION:
+ self.module.exit_json(msg=CUSTOM_ERROR.format(reset_to_default=self.reset_to_default,
+ supported_values=self.allowed_choices), skipped=True)
+ custom_default_file = self.module.params.get('custom_defaults_file')
+ custom_default_buffer = self.module.params.get('custom_defaults_buffer')
+ upload_perfom = False
+ default_data = None
+ if custom_default_file:
+ self.validate_obj.validate_path(custom_default_file)
+ self.validate_obj.validate_file_format(custom_default_file)
+ upload_perfom = True
+ default_data = self.get_xml_content(custom_default_file)
+ elif custom_default_buffer:
+ upload_perfom = True
+ default_data = custom_default_buffer
+ if upload_perfom:
+ self.upload_cd_content(default_data)
+ self.validate_obj.validate_custom_option(self.reset_to_default, self.allowed_choices)
+ return self.reset_to_default_mapped()
+
+ def graceful_restart(self):
+ url = None
+ resp = get_dynamic_uri(self.idrac, self.uri, ACTIONS)
+ if resp:
+ url = resp.get('#Manager.Reset', {}).get('target', {})
+ payload = {"ResetType": "GracefulRestart"}
+ run_reset_status = self.idrac.invoke_request(url, "POST", data=payload)
+ status = run_reset_status.status_code
+ tmp_res, resp = self.create_output(status)
+ if status in STATUS_SUCCESS:
+ tmp_res['msg'] = IDRAC_RESET_SUCCESS_MSG
+ resp['idracreset']['Message'] = IDRAC_RESET_RESTART_SUCCESS_MSG if self.wait_for_idrac else IDRAC_RESET_RESTART_TRIGGER_MSG
+ return tmp_res, resp
def main():
- specs = {}
+ specs = {
+ "reset_to_default": {"choices": ['All', 'ResetAllWithRootDefaults', 'Default', 'CustomDefaults']},
+ "custom_defaults_file": {"type": "str"},
+ "custom_defaults_buffer": {"type": "str"},
+ "wait_for_idrac": {"type": "bool", "default": True},
+ "job_wait_timeout": {"type": 'int', "default": 600},
+ "force_reset": {"type": "bool", "default": False}
+ }
specs.update(idrac_auth_params)
module = AnsibleModule(
argument_spec=specs,
+ mutually_exclusive=[("custom_defaults_file", "custom_defaults_buffer")],
supports_check_mode=True)
-
try:
- with iDRACConnection(module.params) as idrac:
- msg = run_idrac_reset(idrac, module)
+ with iDRACRedfishAPI(module.params) as idrac:
+ allowed_choices = specs['reset_to_default']['choices']
+ reset_obj = FactoryReset(idrac, module, allowed_choices)
+ message_resp, output = reset_obj.execute()
+ if output:
+ if not message_resp.get('changed'):
+ module.exit_json(msg=message_resp.get('msg'), reset_status=output, failed=True)
+ module.exit_json(msg=message_resp.get('msg'), reset_status=output, changed=True)
+ else:
+ if not message_resp.get('changed'):
+ module.exit_json(msg=message_resp.get('msg'), failed=True)
+ module.exit_json(msg=message_resp.get('msg'), changed=True)
except HTTPError as err:
- module.fail_json(msg=str(err), error_info=json.load(err))
+ module.exit_json(msg=str(err), error_info=json.load(err), failed=True)
except URLError as err:
module.exit_json(msg=str(err), unreachable=True)
- except (RuntimeError, SSLValidationError, ConnectionError, KeyError,
- ImportError, ValueError, TypeError) as e:
- module.fail_json(msg=str(e))
- module.exit_json(msg="Successfully performed iDRAC reset.", reset_status=msg)
+ except (RuntimeError, SSLValidationError, ConnectionError, TypeError, KeyError, ValueError, OSError) as err:
+ module.exit_json(msg=str(err), failed=True)
if __name__ == '__main__':
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_server_config_profile.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_server_config_profile.py
index bd7fe2c67..f89272ca3 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_server_config_profile.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_server_config_profile.py
@@ -723,20 +723,15 @@ def export_scp_redfish(module, idrac):
share, scp_file_name_format = get_scp_share_details(module)
scp_components = ",".join(module.params["scp_components"])
include_in_export = IN_EXPORTS[module.params["include_in_export"]]
+ scp_response = idrac.export_scp(export_format=module.params["export_format"],
+ export_use=module.params["export_use"],
+ target=scp_components, include_in_export=include_in_export,
+ job_wait=False, share=share, ) # Assigning it as false because job tracking is done in idrac_redfish.py as well.
if share["share_type"] == "LOCAL":
- scp_response = idrac.export_scp(export_format=module.params["export_format"],
- export_use=module.params["export_use"],
- target=scp_components, include_in_export=include_in_export,
- job_wait=False, share=share, )
scp_response = wait_for_response(scp_response, module, share, idrac)
- else:
- scp_response = idrac.export_scp(export_format=module.params["export_format"],
- export_use=module.params["export_use"],
- target=scp_components, include_in_export=include_in_export,
- job_wait=False, share=share, ) # Assigning it as false because job tracking is done in idrac_redfish.py as well.
- scp_response = wait_for_job_tracking_redfish(
- module, idrac, scp_response
- )
+ scp_response = wait_for_job_tracking_redfish(
+ module, idrac, scp_response
+ )
scp_response = response_format_change(scp_response, module.params, scp_file_name_format)
exit_on_failure(module, scp_response, command)
return scp_response
@@ -753,8 +748,6 @@ def wait_for_response(scp_resp, module, share, idrac):
else:
wait_resp_value = wait_resp.decode("utf-8")
file_obj.write(wait_resp_value)
- if module.params["job_wait"]:
- scp_resp = idrac.invoke_request(job_uri, "GET")
return scp_resp
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_session.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_session.py
new file mode 100644
index 000000000..3303b4ade
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_session.py
@@ -0,0 +1,425 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 9.2.0
+# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = r"""
+---
+module: idrac_session
+short_description: Manage iDRAC sessions
+version_added: "9.2.0"
+description:
+ - This module allows the creation and deletion of sessions on iDRAC.
+options:
+ hostname:
+ description:
+ - IP address or hostname of the iDRAC.
+ type: str
+ username:
+ description:
+ - Username of the iDRAC.
+ - I(username) is required when I(state) is C(present).
+ type: str
+ password:
+ description:
+ - Password of the iDRAC.
+ - I(password) is required when I(state) is C(present).
+ type: str
+ port:
+ description:
+ - Port of the iDRAC.
+ type: int
+ default: 443
+ validate_certs:
+ description:
+ - If C(false), the SSL certificates will not be validated.
+ - Configure C(false) only on personally controlled sites where self-signed certificates are used.
+ type: bool
+ default: true
+ ca_path:
+ description:
+ - The Privacy Enhanced Mail (PEM) file that contains a CA certificate to be used for the validation.
+ type: path
+ timeout:
+ description:
+ - The https socket level timeout in seconds.
+ type: int
+ default: 30
+ state:
+ description:
+ - The state of the session in an iDRAC.
+ - C(present) creates a session.
+ - C(absent) deletes a session.
+ - Module will always report changes found to be applied when I(state) is C(present).
+ choices: [present, absent]
+ type: str
+ default: present
+ auth_token:
+ description:
+ - Authentication token.
+ - I(auth_token) is required when I(state) is C(absent).
+ type: str
+ session_id:
+ description:
+ - Session ID of the iDRAC.
+ - I(session_id) is required when I(state) is C(absent).
+ type: int
+requirements:
+ - "python >= 3.9.6"
+author:
+ - "Rajshekar P(@rajshekarp87)"
+notes:
+ - Run this module from a system that has direct access to Dell iDRAC.
+ - This module supports IPv4 and IPv6 addresses.
+ - This module supports C(check_mode).
+ - This module will always report changes found to be applied when I(state) is C(present).
+"""
+
+EXAMPLES = r"""
+---
+- name: Create a session
+ dellemc.openmanage.idrac_session:
+ hostname: 192.168.0.1
+ username: username
+ password: password
+ state: present
+
+- name: Delete a session
+ dellemc.openmanage.idrac_session:
+ hostname: 192.168.0.1
+ state: absent
+ auth_token: aed4aa802b748d2f3b31deec00a6b28a
+ session_id: 2
+"""
+
+RETURN = r'''
+---
+msg:
+ description: Status of the session operation.
+ returned: always
+ type: str
+ sample: "The session has been created successfully."
+session_data:
+ description: The session details.
+ returned: For session creation operation
+ type: dict
+ sample: {
+ "@Message.ExtendedInfo": [
+ {
+ "Message": "The resource has been created successfully.",
+ "MessageArgs": [],
+ "MessageId": "Base.1.12.Created",
+ "RelatedProperties": [],
+ "Resolution": "None.",
+ "Severity": "OK"
+ },
+ {
+ "Message": "A new resource is successfully created.",
+ "MessageArgs": [],
+ "MessageId": "IDRAC.2.9.SYS414",
+ "RelatedProperties": [],
+ "Resolution": "No response action is required.",
+ "Severity": "Informational"
+ }
+ ],
+ "ClientOriginIPAddress": "100.96.37.58",
+ "CreatedTime": "2024-04-05T01:14:01-05:00",
+ "Description": "User Session",
+ "Id": "74",
+ "Name": "User Session",
+ "Password": null,
+ "SessionType": "Redfish",
+ "UserName": "root"
+ }
+x_auth_token:
+ description: Authentication token.
+ returned: For session creation operation
+ type: str
+ sample: "d15f17f01cd627c30173b1582642497d"
+error_info:
+ description: Details of the HTTP Error.
+ returned: On HTTP error
+ type: dict
+ sample: {
+ "error": {
+ "@Message.ExtendedInfo": [
+ {
+ "Message": "Unable to complete the operation because an invalid username
+ and/or password is entered, and therefore authentication failed.",
+ "MessageArgs": [],
+ "MessageId": "IDRAC.2.9.SYS415",
+ "RelatedProperties": [],
+ "Resolution": "Enter valid user name and password and retry the operation.",
+ "Severity": "Warning"
+ }
+ ],
+ "code": "Base.1.12.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information"
+ }
+ }
+'''
+
+
+import json
+from urllib.error import HTTPError, URLError
+from ansible_collections.dellemc.openmanage.plugins.module_utils.session_utils import SessionAPI
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils.common.parameters import env_fallback
+from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import (
+ get_dynamic_uri, remove_key)
+
+REDFISH = "/redfish/v1"
+SESSIONS = "Sessions"
+ODATA = "@odata.id"
+ODATA_REGEX = "(.*?)@odata"
+
+CREATE_SUCCESS_MSG = "The session has been created successfully."
+DELETE_SUCCESS_MSG = "The session has been deleted successfully."
+FAILURE_MSG = "Unable to '{operation}' a session."
+CHANGES_FOUND_MSG = "Changes found to be applied."
+NO_CHANGES_FOUND_MSG = "No changes found to be applied."
+
+
+class Session():
+ """
+ Parent class for all session operations.
+ """
+ def __init__(self, idrac, module):
+ """
+ Initializes the object with the given idrac and module parameters.
+
+ Args:
+ idrac (object): The idrac object.
+ module (object): The module object.
+
+ Returns:
+ None
+ """
+ self.idrac = idrac
+ self.module = module
+
+ def get_session_url(self):
+ """
+ Retrieves the URL for the sessions endpoint from the Redfish API.
+
+ Returns:
+ str: The URL for the sessions endpoint, or None if not found.
+ """
+ v1_resp = get_dynamic_uri(self.idrac, REDFISH)
+ sessions_url = v1_resp.get('Links', {}).get(SESSIONS, {}).get(ODATA, {})
+ return sessions_url
+
+
+class CreateSession(Session):
+ """
+ Creates a session.
+ """
+ def execute(self):
+ """
+ Executes the session creation process.
+
+ This function creates a session by sending a POST request to the session URL with the
+ provided username and password.
+ If the request is successful (status code 201), it retrieves the session details, removes
+ any OData keys from the response,
+ and extracts the X-Auth-Token from the response headers. It then exits the module with a
+ success message, indicating that
+ the session was created successfully, and provides the session data and X-Auth-Token as
+ output variables.
+
+ If the request fails (status code other than 201), it exits the module with a failure
+ message, indicating that the session creation failed.
+
+ Parameters:
+ None
+
+ Returns:
+ None
+ """
+ payload = {"UserName": self.module.params.get("username"),
+ "Password": self.module.params.get("password")}
+ session_url = self.get_session_url()
+ if self.module.check_mode:
+ self.module.exit_json(msg=CHANGES_FOUND_MSG, changed=True)
+ session_response = self.idrac.invoke_request(session_url, "POST", data=payload)
+ status = session_response.status_code
+ if status == 201:
+ session_details = session_response.json_data
+ session_data = remove_key(session_details, regex_pattern=ODATA_REGEX)
+ x_auth_token = session_response.headers.get('X-Auth-Token')
+ self.module.exit_json(msg=CREATE_SUCCESS_MSG,
+ changed=True,
+ session_data=session_data,
+ x_auth_token=x_auth_token)
+ else:
+ self.module.exit_json(msg=FAILURE_MSG.format(operation="create"), failed=True)
+
+
+class DeleteSession(Session):
+ """
+ Deletes a session.
+ """
+ def execute(self):
+ """
+ Executes the deletion of a session.
+
+ This function retrieves the session ID from the module parameters and constructs the
+ session URL using the `get_session_url` method. It then invokes a DELETE request to the
+ session URL with the session ID appended. The response from the request is stored in the
+ `session_response` variable.
+
+ If the response status code is 200, indicating a successful deletion, the function exits
+ the module with a success message and sets the `changed` parameter to True. Otherwise, it
+ exits the module with a failure message and sets the `failed` parameter to True.
+
+ Parameters:
+ None
+
+ Returns:
+ None
+ """
+ session_id = self.module.params.get("session_id")
+ session_url = self.get_session_url()
+ session_status = self.get_session_status(session_url, session_id)
+ if self.module.check_mode:
+ if session_status == 200:
+ self.module.exit_json(msg=CHANGES_FOUND_MSG, changed=True)
+ else:
+ self.module.exit_json(msg=NO_CHANGES_FOUND_MSG)
+ else:
+ if session_status == 200:
+ try:
+ session_response = self.idrac.invoke_request(session_url + f"/{session_id}",
+ "DELETE")
+ status = session_response.status_code
+ if status == 200:
+ self.module.exit_json(msg=DELETE_SUCCESS_MSG, changed=True)
+ except HTTPError as err:
+ filter_err = remove_key(json.load(err), regex_pattern=ODATA_REGEX)
+ self.module.exit_json(msg=FAILURE_MSG.format(operation="delete"),
+ error_info=filter_err,
+ failed=True)
+ else:
+ self.module.exit_json(msg=NO_CHANGES_FOUND_MSG)
+
+ def get_session_status(self, session_url, session_id):
+ """
+ Retrieves the status of a session given its URL and ID.
+
+ Args:
+ session_url (str): The URL of the session.
+ session_id (str): The ID of the session.
+
+
+ Returns:
+ int: The status code of the session status response. If an HTTPError occurs, the status
+ code of the error is returned.
+ """
+ try:
+ session_status_response = self.idrac.invoke_request(session_url + f"/{session_id}",
+ "GET")
+ session_status = session_status_response.status_code
+ except HTTPError as err:
+ session_status = err.status
+ return session_status
+
+
+def main():
+ """
+ Main function that initializes the Ansible module with the argument specs and required if
+ conditions.
+ It then creates a SessionAPI object with the module parameters and performs a session operation
+ based on the state parameter.
+ If the state is "present", it creates a CreateSession object and executes it. If the state is
+ "absent", it creates a DeleteSession object and executes it.
+ The session status is returned.
+
+ Raises:
+ HTTPError: If an HTTP error occurs, the error message and filtered error information are
+ returned in the module's exit_json.
+ URLError: If a URL error occurs, the error message is returned in the module's exit_json.
+ SSLValidationError, ConnectionError, TypeError, ValueError, OSError: If any other error
+ occurs, the error message is returned in the module's exit_json.
+
+ Returns:
+ None
+ """
+ specs = get_argument_spec()
+ module = AnsibleModule(
+ argument_spec=specs,
+ required_if=[
+ ["state", "present", ("username", "password",)],
+ ["state", "absent", ("auth_token", "session_id",)]
+ ],
+ supports_check_mode=True
+ )
+
+ try:
+ idrac = SessionAPI(module.params)
+ session_operation = module.params.get("state")
+ if session_operation == "present":
+ session_operation_obj = CreateSession(idrac, module)
+ else:
+ session_operation_obj = DeleteSession(idrac, module)
+ session_operation_obj.execute()
+ except HTTPError as err:
+ filter_err = remove_key(json.load(err), regex_pattern=ODATA_REGEX)
+ module.exit_json(msg=str(err), error_info=filter_err, failed=True)
+ except URLError as err:
+ module.exit_json(msg=str(err), unreachable=True)
+ except (SSLValidationError, ConnectionError, TypeError, ValueError, OSError) as err:
+ module.exit_json(msg=str(err), failed=True)
+
+
+def get_argument_spec():
+ """
+ Returns a dictionary representing the argument specification for a module.
+
+ The dictionary contains the following keys and their corresponding values:
+ - "hostname": A string representing the hostname.
+ - "username": A string representing the username. It has a fallback option to retrieve the
+ value from the environment variable 'IDRAC_USERNAME'.
+ - "password": A string representing the password. It is marked as not to be logged and has a
+ fallback option to retrieve the value from the environment variable 'IDRAC_PASSWORD'.
+ - "port": An integer representing the port number. The default value is 443.
+ - "validate_certs": A boolean representing whether to validate certificates. The default value
+ is True.
+ - "ca_path": A path representing the certificate authority path. The default value is None.
+ - "timeout": An integer representing the timeout value. The default value is 30.
+ - "state": A string representing the state. The default value is "present". The choices are
+ ["present", "absent"].
+ - "auth_token": A string representing the authentication token. It is marked as not to be
+ logged.
+ - "session_id": An integer representing the session ID.
+
+ Returns:
+ A dictionary representing the argument specification.
+ """
+ return {
+ "hostname": {"type": "str"},
+ "username": {"type": "str", "fallback": (env_fallback, ['IDRAC_USERNAME'])},
+ "password": {"type": "str", "no_log": True, "fallback": (env_fallback, ['IDRAC_PASSWORD'])},
+ "port": {"type": "int", "default": 443},
+ "validate_certs": {"type": "bool", "default": True},
+ "ca_path": {"type": "path", "default": None},
+ "timeout": {"type": "int", "default": 30},
+ "state": {"type": 'str', "default": "present", "choices": ["present", "absent"]},
+ "auth_token": {"type": "str", "no_log": True},
+ "session_id": {"type": "int"}
+ }
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_storage_volume.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_storage_volume.py
new file mode 100644
index 000000000..13c1ec065
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_storage_volume.py
@@ -0,0 +1,924 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 9.1.0
+# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+DOCUMENTATION = r'''
+---
+module: idrac_storage_volume
+short_description: Configures the RAID configuration attributes
+version_added: "9.1.0"
+description:
+ - This module is responsible for configuring the RAID attributes.
+extends_documentation_fragment:
+ - dellemc.openmanage.idrac_auth_options
+options:
+ state:
+ type: str
+ description:
+ - C(create), performs create volume operation.
+ - C(delete), performs remove volume operation.
+ - C(view), returns storage view.
+ choices: ['create', 'delete', 'view']
+ default: 'view'
+ span_depth:
+ type: int
+ description:
+ - Number of spans in the RAID configuration.
+ - I(span_depth) is required for C(create) and its value depends on I(volume_type).
+ default: 1
+ span_length:
+ type: int
+ description:
+ - Number of disks in a span.
+ - I(span_length) is required for C(create) and its value depends on I(volume_type).
+ default: 1
+ number_dedicated_hot_spare:
+ type: int
+ description: Number of dedicated hot spares.
+ default: 0
+ volume_type:
+ type: str
+ description: Provide the required RAID level.
+ choices: ['RAID 0', 'RAID 1', 'RAID 5', 'RAID 6', 'RAID 10', 'RAID 50', 'RAID 60']
+ default: 'RAID 0'
+ disk_cache_policy:
+ type: str
+ description: Disk Cache Policy.
+ choices: ["Default", "Enabled", "Disabled"]
+ default: "Default"
+ write_cache_policy:
+ type: str
+ description: Write cache policy.
+ choices: ["WriteThrough", "WriteBack", "WriteBackForce"]
+ default: "WriteThrough"
+ read_cache_policy:
+ type: str
+ description: Read cache policy.
+ choices: ["NoReadAhead", "ReadAhead", "AdaptiveReadAhead"]
+ default: "NoReadAhead"
+ stripe_size:
+ type: int
+ description: Stripe size value to be provided in multiples of 64 * 1024.
+ default: 65536
+ controller_id:
+ type: str
+ description:
+ - Fully Qualified Device Descriptor (FQDD) of the storage controller, for example 'RAID.Integrated.1-1'.
+ Controller FQDD is required for C(create) RAID configuration.
+ media_type:
+ type: str
+ description: Media type.
+ choices: ['HDD', 'SSD']
+ protocol:
+ type: str
+ description: Bus protocol.
+ choices: ['SAS', 'SATA', 'PCIE']
+ volume_id:
+ type: str
+ description:
+ - Fully Qualified Device Descriptor (FQDD) of the virtual disk, for example 'Disk.virtual.0:RAID.Slot.1-1'.
+ This option is used to get the virtual disk information.
+ volumes:
+ type: list
+ elements: dict
+ description:
+ - A list of virtual disk specific iDRAC attributes. This is applicable for C(create) and C(delete) operations.
+ - For C(create) operation, name and drives are applicable options, other volume options can also be specified.
+ - The I(drives) option is required for the C(create) operation and accepts either location (list of drive slot)
+ or id (list of drive fqdd).
+ - In iDRAC8, there is no pre-validation for the state of drives. The disk ID or slot number of the drive
+ provided may or may not be in Ready state. Enter the disk ID or slot number of the drive that is already
+ in Ready state.
+ - For C(delete) operation, only name option is applicable.
+ - See the examples for more details.
+ capacity:
+ type: float
+ description: Virtual disk size in GB.
+ raid_reset_config:
+ type: str
+ description:
+ - This option represents whether a reset config operation needs to be performed on the RAID controller.
+ Reset Config operation deletes all the virtual disks present on the RAID controller.
+ choices: ['true', 'false']
+ default: 'false'
+ raid_init_operation:
+ type: str
+ description: This option represents initialization configuration operation to be performed on the virtual disk.
+ choices: [None, Fast]
+ job_wait:
+ description:
+ - This parameter provides the option to wait for the job completion.
+ - This is applicable when I(state) is C(create) or C(delete).
+ type: bool
+ default: true
+ job_wait_timeout:
+ description:
+ - This parameter is the maximum wait time of I(job_wait) in seconds.
+ - This option is applicable when I(job_wait) is C(true).
+ type: int
+ default: 900
+
+requirements:
+ - "python >= 3.9.6"
+author:
+ - "Felix Stephen (@felixs88)"
+ - "Kritika Bhateja (@Kritika-Bhateja-03)"
+ - "Abhishek Sinha(@ABHISHEK-SINHA10)"
+notes:
+ - Run this module from a system that has direct access to Integrated Dell Remote Access Controller.
+ - This module supports both IPv4 and IPv6 address for I(idrac_ip).
+ - This module supports C(check_mode).
+ - This module does not display the controller battery details for the C(view) operation of the storage in iDRAC8.
+'''
+
+EXAMPLES = r'''
+---
+- name: Create single volume
+ dellemc.openmanage.idrac_storage_volume:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "username"
+ idrac_password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "create"
+ controller_id: "RAID.Slot.1-1"
+ volumes:
+ - drives:
+ location: [5]
+
+- name: Create multiple volume
+ dellemc.openmanage.idrac_storage_volume:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "username"
+ idrac_password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ raid_reset_config: "True"
+ state: "create"
+ controller_id: "RAID.Slot.1-1"
+ volume_type: "RAID 1"
+ span_depth: 1
+ span_length: 2
+ number_dedicated_hot_spare: 1
+ disk_cache_policy: "Enabled"
+ write_cache_policy: "WriteBackForce"
+ read_cache_policy: "ReadAhead"
+ stripe_size: 65536
+ capacity: 100
+ raid_init_operation: "Fast"
+ volumes:
+ - name: "volume_1"
+ drives:
+ id: ["Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1", "Disk.Bay.2:Enclosure.Internal.0-1:RAID.Slot.1-1"]
+ - name: "volume_2"
+ volume_type: "RAID 5"
+ span_length: 3
+ span_depth: 1
+ drives:
+ location: [7, 3, 5]
+ disk_cache_policy: "Disabled"
+ write_cache_policy: "WriteBack"
+ read_cache_policy: "NoReadAhead"
+ stripe_size: 131072
+ capacity: "200"
+ raid_init_operation: "None"
+
+- name: View all volume details
+ dellemc.openmanage.idrac_storage_volume:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "username"
+ idrac_password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "view"
+
+- name: View specific volume details
+ dellemc.openmanage.idrac_storage_volume:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "username"
+ idrac_password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "view"
+ controller_id: "RAID.Slot.1-1"
+ volume_id: "Disk.Virtual.0:RAID.Slot.1-1"
+
+- name: Delete single volume
+ dellemc.openmanage.idrac_storage_volume:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "username"
+ idrac_password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "delete"
+ volumes:
+ - name: "volume_1"
+
+- name: Delete multiple volume
+ dellemc.openmanage.idrac_storage_volume:
+ idrac_ip: "192.168.0.1"
+ idrac_user: "username"
+ idrac_password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "delete"
+ volumes:
+ - name: "volume_1"
+ - name: "volume_2"
+'''
+
+RETURN = r'''
+---
+msg:
+ type: str
+ description: Overall status of the storage configuration operation.
+ returned: always
+ sample: "Successfully completed the view storage volume operation"
+storage_status:
+ type: dict
+ description: Storage configuration job and progress details from the iDRAC.
+ returned: success
+ sample:
+ {
+ "Id": "JID_XXXXXXXXX",
+ "JobState": "Completed",
+ "JobType": "ImportConfiguration",
+ "Message": "Successfully imported and applied Server Configuration Profile.",
+ "MessageId": "XXX123",
+ "Name": "Import Configuration",
+ "PercentComplete": 100,
+ "StartTime": "TIME_NOW",
+ "TargetSettingsURI": null,
+ }
+error_info:
+ description: Details of the HTTP Error.
+ returned: on HTTP error
+ type: dict
+ sample: {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "MessageId": "GEN1234",
+ "RelatedProperties": [],
+ "Message": "Unable to process the request because an error occurred.",
+ "MessageArgs": [],
+ "Severity": "Critical",
+ "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
+ }
+ ]
+ }
+ }
+'''
+
+import re
+import operator
+from urllib.error import HTTPError, URLError
+from copy import deepcopy
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import (
+ get_dynamic_uri, validate_and_get_first_resource_id_uri, xml_data_conversion, idrac_redfish_job_tracking, remove_key, get_idrac_firmware_version)
+
+
# Redfish URIs used by this module.
SYSTEMS_URI = "/redfish/v1/Systems"
iDRAC_JOB_URI = "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/{job_id}"
# User-facing message templates (filled via str.format before exit_json).
CONTROLLER_NOT_EXIST_ERROR = "Specified Controller {controller_id} does not exist in the System."
CONTROLLER_NOT_DEFINED = "Controller ID is required."
SUCCESSFUL_OPERATION_MSG = "Successfully completed the {operation} storage volume operation."
DRIVES_NOT_EXIST_ERROR = "No Drive(s) are attached to the specified Controller Id: {controller_id}."
DRIVES_NOT_MATCHED = "Following Drive(s) {specified_drives} are not attached to the specified Controller Id: {controller_id}."
NEGATIVE_OR_ZERO_MSG = "The value for the `{parameter}` parameter cannot be negative or zero."
NEGATIVE_MSG = "The value for the `{parameter}` parameter cannot be negative."
INVALID_VALUE_MSG = "The value for the `{parameter}` parameter is invalid."
ID_AND_LOCATION_BOTH_DEFINED = "Either id or location is allowed."
ID_AND_LOCATION_BOTH_NOT_DEFINED = "Either id or location should be specified."
DRIVES_NOT_DEFINED = "Drives must be defined for volume creation."
NOT_ENOUGH_DRIVES = "Number of sufficient disks not found in Controller '{controller_id}'!"
WAIT_TIMEOUT_MSG = "The job is not complete after {0} seconds."
# NOTE: constant name keeps the historical misspelling ("TRIGERRED"); it is
# referenced elsewhere in this module, so renaming would be a breaking change.
JOB_TRIGERRED = "Successfully triggered the {0} storage volume operation."
VOLUME_NAME_REQUIRED_FOR_DELETE = "Virtual disk name is a required parameter for remove virtual disk operations."
VOLUME_NOT_FOUND = "Unable to find the virtual disk."
CHANGES_NOT_FOUND = "No changes found to commit!"
CHANGES_FOUND = "Changes found to commit!"
# Redfish payload parsing helpers.
ODATA_ID = "@odata.id"
ODATA_REGEX = "(.*?)@odata"
ATTRIBUTE = "</Attribute>"
VIEW_OPERATION_FAILED = "Failed to fetch storage details."
VIEW_CONTROLLER_DETAILS_NOT_FOUND = "Failed to find the controller {controller_id}."
VIEW_OPERATION_CONTROLLER_NOT_SPECIFIED = "Controller identifier parameter is missing."
VIEW_VIRTUAL_DISK_DETAILS_NOT_FOUND = "Failed to find the volume : {volume_id} in controller : {controller_id}."
SUCCESS_STATUS = "Success"
FAILED_STATUS = "Failed"
# Lifecycle-controller message IDs treated as a failed RAID job even when the
# job tracker itself does not report failure (see wait_for_job_completion).
ERROR_CODES = ["SYS041", "SYS044", "SYS045", "SYS046", "SYS047", "SYS048", "SYS050", "SYS051", "SYS062",
               "SYS063", "SYS064", "SYS065", "SYS067", "SYS068", "SYS070", "SYS071", "SYS072",
               "SYS073", "SYS075", "SYS076", "SYS077", "SYS078", "SYS079", "SYS080"]
+
+
class StorageBase:
    """Shared base for the create/delete storage operations.

    Normalises module input into ``module_ext_params``, builds the SCP
    (Server Configuration Profile) XML payload fragments for virtual disks,
    and tracks the resulting iDRAC job.
    """

    def __init__(self, idrac, module):
        # Normalise params first; idrac/module are kept for REST calls and
        # exit handling.
        self.module_ext_params = self.module_extend_input(module)
        self.idrac = idrac
        self.module = module

    def data_conversion(self, module, each_volume):
        """Back-fill one volume entry with module-level defaults.

        Any key the user did not set inside a ``volumes`` element inherits
        the corresponding top-level module parameter (which carries the
        documented default).
        """
        volume_related_input = [
            'volume_type', 'span_length', 'span_depth',
            'number_dedicated_hot_spare', 'disk_cache_policy',
            'write_cache_policy', 'read_cache_policy', 'stripe_size',
            'capacity', 'raid_init_operation', 'protocol', 'media_type'
        ]
        for key in volume_related_input:
            value = module.params.get(key)
            if key not in each_volume:
                each_volume[key] = value
        return each_volume

    def module_extend_input(self, module):
        """
        Extends the input module parameters with additional volume-related
        parameters.

        Args:
            module (object): The module object.

        Returns:
            dict: A deep copy of ``module.params`` with every volume entry
            filled in (and a synthetic volume when none was supplied).
        """

        module_copy = deepcopy(module.params)
        volumes = module_copy.get('volumes')
        if volumes:
            for index in range(len(volumes)):
                volumes[index] = self.data_conversion(module, volumes[index])
        else:
            # No volumes supplied: synthesise one volume from the top-level
            # options.  Placeholder negative drive ids are used here; they
            # are replaced with real disk FQDDs during create validation.
            tmp_volume = self.data_conversion(module, {})
            required_pd = int(module_copy.get('span_length', 1)) * int(module_copy.get('span_depth', 1))
            tmp_volume['drives'] = {'id': [(-i) for i in range(1, required_pd + 1)]}
            module_copy['volumes'] = [tmp_volume]

        # Coerce numeric options that may arrive as strings in volume dicts.
        int_input = ['span_length', 'span_depth', 'number_dedicated_hot_spare',
                     'stripe_size']
        if volumes:
            for each_volume in volumes:
                for each_input in each_volume:
                    if each_input in int_input:
                        each_volume[each_input] = int(each_volume[each_input])
        return module_copy

    def payload_for_disk(self, volume):
        """Build the <Attribute> fragments naming the physical disks (and
        any dedicated hot spares) backing a virtual disk."""
        disk_payload = ''
        if 'drives' in volume and 'id' in volume['drives']:
            for each_pd_id in volume['drives']['id']:
                scp = '<Attribute Name="IncludedPhysicalDiskID">{id}</Attribute>'.format(id=each_pd_id)
                disk_payload = disk_payload + scp
        if 'dedicated_hot_spare' in volume:
            for each_dhs in volume['dedicated_hot_spare']:
                scp = '<Attribute Name="RAIDdedicatedSpare">{id}</Attribute>'.format(id=each_dhs)
                disk_payload = disk_payload + scp
        return disk_payload

    def construct_volume_payload(self, vd_id, volume):

        """
        Constructs the SCP XML payload for one virtual disk.

        :param vd_id: ordinal used to build the new disk FQDD
            ``Disk.Virtual.<vd_id>:<controller_id>``.
        :param volume: normalised volume options for this disk.

        Returns:
            str: The constructed XML payload fragment.
        """
        # Module option name -> SCP attribute name.
        key_mapping: dict = {
            'raid_init_operation': 'RAIDinitOperation',
            'state': "RAIDaction",
            'disk_cache_policy': "DiskCachePolicy",
            'write_cache_policy': "RAIDdefaultWritePolicy",
            'read_cache_policy': "RAIDdefaultReadPolicy",
            'stripe_size': "StripeSize",
            'span_depth': "SpanDepth",
            'span_length': "SpanLength",
            'volume_type': "RAIDTypes",
            'name': 'Name',
            'capacity': 'Size',
        }
        controller_id = self.module_ext_params.get("controller_id")
        state = self.module_ext_params.get("state")
        # Including state in each_volume as it is mapped to RAIDaction
        # ("create" -> "Create").
        volume.update({'state': state.capitalize()})
        payload = ''
        attr = {}
        vdfqdd = "Disk.Virtual.{0}:{1}".format(vd_id, controller_id)
        for key in volume:
            # Falsy values (None/0/'') are intentionally omitted from SCP.
            if volume[key] and key in key_mapping:
                attr[key_mapping[key]] = volume[key]
        disk_paylod = self.payload_for_disk(volume)
        payload = xml_data_conversion(attr, vdfqdd, disk_paylod)
        return payload

    def constuct_payload(self, name_id_mapping):
        """Assemble the controller-level SCP payload for all requested
        volumes.  (Method name keeps its historical spelling; callers in
        this module use it.)

        :param name_id_mapping: mapping of existing volume names to FQDDs;
            its size numbers new virtual disks after the existing ones.
        """
        number_of_existing_vd = len(name_id_mapping)
        volume_payload, attr = '', {}
        raid_reset_config_value = self.module_ext_params.get('raid_reset_config')
        raid_key_mapping = {'raid_reset_config': 'RAIDresetConfig'}
        if raid_reset_config_value == 'true':
            # SCP expects the capitalised literal 'True'.
            raid_reset_config_value = 'True'
        attr = {raid_key_mapping['raid_reset_config']: raid_reset_config_value}
        for each_volume in self.module_ext_params.get('volumes'):
            volume_payload = volume_payload + self.construct_volume_payload(number_of_existing_vd,
                                                                            each_volume)
            number_of_existing_vd = number_of_existing_vd + 1
        raid_payload = xml_data_conversion(attr, self.module_ext_params.get('controller_id'), volume_payload)
        return raid_payload

    def wait_for_job_completion(self, job_resp):
        """Track the SCP import job referenced by *job_resp*.

        Exits the module directly on timeout, on job failure (including
        completed jobs whose MessageId is in ERROR_CODES), or — when
        ``job_wait`` is disabled — right after one status poll.  Otherwise
        returns the sanitised job dictionary.
        """
        job_wait = self.module_ext_params.get('job_wait')
        job_wait_timeout = self.module_ext_params.get('job_wait_timeout')
        job_dict = {}
        # The job URI is advertised via the Location response header.
        if (job_tracking_uri := job_resp.headers.get("Location")):
            job_id = job_tracking_uri.split("/")[-1]
            job_uri = iDRAC_JOB_URI.format(job_id=job_id)
            if job_wait:
                job_failed, msg, job_dict, wait_time = idrac_redfish_job_tracking(self.idrac, job_uri,
                                                                                  max_job_wait_sec=job_wait_timeout,
                                                                                  sleep_interval_secs=1)
                job_dict = remove_key(job_dict, regex_pattern=ODATA_REGEX)
                if int(wait_time) >= int(job_wait_timeout):
                    self.module.exit_json(msg=WAIT_TIMEOUT_MSG.format(job_wait_timeout), changed=True, storage_status=job_dict)
                # Some RAID failures complete the job but carry an error
                # MessageId; treat those as failures too.
                if job_failed or job_dict.get("MessageId", "") in ERROR_CODES:
                    self.module.exit_json(msg=job_dict.get("Message"), storage_status=job_dict, failed=True)
            else:
                job_resp = self.idrac.invoke_request(job_uri, 'GET')
                job_dict = job_resp.json_data
                job_dict = remove_key(job_dict, regex_pattern=ODATA_REGEX)
                self.module.exit_json(msg=JOB_TRIGERRED.format(self.module.params.get('state')), storage_status=job_dict, changed=True)
        return job_dict
+
+
class StorageData:
    """Read-only access to the iDRAC storage inventory over Redfish."""

    def __init__(self, idrac, module):
        self.idrac = idrac
        self.module = module

    def fetch_controllers_uri(self):
        """Return the Storage collection reference of the first system
        resource; exits the module when the system URI cannot be resolved."""
        uri, err_msg = validate_and_get_first_resource_id_uri(
            self.module, self.idrac, SYSTEMS_URI)
        if err_msg:
            self.module.exit_json(msg=err_msg, failed=True)
        storage_controllers = get_dynamic_uri(self.idrac, uri, 'Storage')
        return storage_controllers

    def fetch_api_data(self, uri, key_index_from_end):
        """GET *uri* and return ``(identifier, response)``, where the
        identifier is the URI path segment at *key_index_from_end*
        (e.g. -1 -> the last segment)."""
        key = uri.split("/")[key_index_from_end]
        uri_data = self.idrac.invoke_request(uri, "GET")
        return key, uri_data

    def all_storage_data(self):
        """Collect the raw controller/drive/volume/enclosure inventory.

        Returns a dict shaped as
        ``{"Controllers": {<controller_id>: {...expanded controller...}}}``.
        CPU-attached (direct) controllers are skipped.
        """
        storage_info = {"Controllers": {}}
        # $expand inlines controller members in a single request.
        controllers_details_uri = self.fetch_controllers_uri()[ODATA_ID] + "?$expand=*($levels=1)"
        controllers_list = get_dynamic_uri(self.idrac, controllers_details_uri)
        for each_controller in controllers_list["Members"]:
            controller_id = each_controller.get("Id")
            if controller_id.startswith("CPU"):
                continue
            storage_info["Controllers"][controller_id] = deepcopy(each_controller)
            storage_info["Controllers"][controller_id]["Drives"] = {}
            storage_info["Controllers"][controller_id]["Volumes"] = {}
            storage_info["Controllers"][controller_id]["Links"]["Enclosures"] = {}
            # To fetch drives data
            for each_drive_uri in each_controller["Drives"]:
                key, uri_data = self.fetch_api_data(each_drive_uri[ODATA_ID], -1)
                storage_info["Controllers"][controller_id]["Drives"][key] = uri_data.json_data

            # To fetch volumes data
            volume_uri = each_controller['Volumes'][ODATA_ID]
            volumes_list = get_dynamic_uri(self.idrac, volume_uri, "Members")
            for each_volume_uri in volumes_list:
                key, uri_data = self.fetch_api_data(each_volume_uri[ODATA_ID], -1)
                storage_info["Controllers"][controller_id]["Volumes"][key] = uri_data.json_data
            # To fetch enclosures
            for each_enclosure_uri in each_controller["Links"]["Enclosures"]:
                key, uri_data = self.fetch_api_data(each_enclosure_uri[ODATA_ID], -1)
                storage_info["Controllers"][controller_id]["Links"]["Enclosures"][key] = uri_data.json_data
        return storage_info

    def fetch_storage_data(self):
        """Shape the raw inventory into the condensed 'view' structure
        returned by state=view (controller sensors, virtual disks,
        enclosures, physical disks)."""
        storage_info = {"Controller": {}}
        storage_data = self.all_storage_data()
        firm_ver = get_idrac_firmware_version(self.idrac)
        for controller_id, controller_data in storage_data["Controllers"].items():
            storage_info["Controller"][controller_id] = {
                "ControllerSensor": {controller_id: {}}
            }
            # Controller battery info is only read on firmware >= 3.00.
            if firm_ver >= "3.00":
                battery_data = controller_data["Oem"]["Dell"].get("DellControllerBattery")
                if battery_data:
                    storage_info["Controller"][controller_id]["ControllerSensor"][controller_id]["ControllerBattery"] = [battery_data["Id"]]
            self.fetch_volumes(controller_id, controller_data, storage_info)
            self.fetch_enclosures_and_physical_disk(controller_id, controller_data, storage_info)
        return storage_info

    def fetch_volumes(self, controller_id, controller_data, storage_info):
        """Add a ``VirtualDisk`` section (volume -> backing physical disks)
        for the given controller, when it has any volumes."""
        if controller_data["Volumes"]:
            storage_info.setdefault("Controller", {}).setdefault(controller_id, {})["VirtualDisk"] = {}
            for volume_id, volume_data in controller_data["Volumes"].items():
                physical_disk = [self.fetch_api_data(drive[ODATA_ID], -1)[0] for drive in volume_data["Links"]["Drives"]]
                storage_info["Controller"][controller_id]["VirtualDisk"][volume_id] = {"PhysicalDisk": physical_disk}

    def fetch_enclosures_and_physical_disk(self, controller_id, controller_data, storage_info):
        """Add enclosure sensors and physical-disk lists; backplane-less
        controllers get a flat ``PhysicalDisk`` list instead."""
        enclosures = [enclosure_id for enclosure_id in controller_data["Links"]["Enclosures"].keys() if enclosure_id.startswith("Enclosure")]
        if len(enclosures) >= 1:
            storage_info.setdefault("Controller", {})
            storage_info["Controller"].setdefault(controller_id, {})
            storage_info["Controller"][controller_id].setdefault("Enclosure", {})
            for enclosure_id in enclosures:
                storage_info["Controller"][controller_id]["Enclosure"][enclosure_id] = {"EnclosureSensor": {enclosure_id: {}}}
                physical_disk = [self.fetch_api_data(drive[ODATA_ID], -1)[0] for drive in
                                 controller_data["Links"]["Enclosures"][enclosure_id]["Links"]["Drives"]]
                if physical_disk:
                    storage_info["Controller"][controller_id]["Enclosure"][enclosure_id]["PhysicalDisk"] = physical_disk
        else:
            if controller_data["Drives"].keys():
                storage_info["Controller"][controller_id]["PhysicalDisk"] = list(controller_data["Drives"].keys())
+
+
class StorageValidation(StorageBase):
    """Input validation shared by the create and delete operations."""

    def __init__(self, idrac, module):
        super().__init__(idrac, module)
        # Fetch the full storage inventory once; every check reuses it.
        self.idrac_data = StorageData(idrac, module).all_storage_data()
        self.controller_id = module.params.get("controller_id")

    def validate_controller_exists(self):
        """Exit the module if controller_id is missing or unknown."""
        if not self.controller_id:
            self.module.exit_json(msg=CONTROLLER_NOT_DEFINED, failed=True)
        controllers = self.idrac_data["Controllers"]
        if self.controller_id not in controllers.keys():
            self.module.exit_json(msg=CONTROLLER_NOT_EXIST_ERROR.format(controller_id=self.controller_id), failed=True)

    def validate_job_wait_negative_values(self):
        """Exit if job_wait is enabled with a non-positive timeout."""
        if self.module_ext_params.get("job_wait") and self.module_ext_params.get("job_wait_timeout") <= 0:
            self.module.exit_json(msg=NEGATIVE_OR_ZERO_MSG.format(parameter="job_wait_timeout"), failed=True)

    def validate_negative_values_for_volume_params(self, each_volume):
        """Exit if any per-volume numeric option is zero or negative.

        Fixes vs. the earlier revision:
        - the option is spelled ``stripe_size``; the old ``strip_size`` key
          never matched, so stripe_size was silently left unvalidated;
        - ``capacity`` may be given as a string inside a ``volumes`` entry
          (see EXAMPLES), so values are coerced with float() before the
          comparison instead of raising TypeError.
        """
        inner_params = ["span_depth", "span_length", "capacity", "stripe_size"]
        for param in inner_params:
            value = each_volume.get(param)
            if value is not None and float(value) <= 0:
                self.module.exit_json(msg=NEGATIVE_OR_ZERO_MSG.format(parameter=param), failed=True)
        hot_spare_count = each_volume.get("number_dedicated_hot_spare")
        # Zero dedicated hot spares is valid; only negatives are rejected.
        if hot_spare_count is not None and int(hot_spare_count) < 0:
            self.module.exit_json(msg=NEGATIVE_MSG.format(parameter="number_dedicated_hot_spare"), failed=True)

    def validate_volume_drives(self, specified_volume):
        """Validate the ``drives`` sub-option of one volume.

        Exactly one of ``id``/``location`` must be supplied; the drive
        count is then checked against the RAID-level rules.
        """
        specified_drives = specified_volume.get("drives")
        if not specified_drives:
            self.module.exit_json(msg=DRIVES_NOT_DEFINED, failed=True)
        if specified_drives.get("id") and specified_drives.get("location"):
            self.module.exit_json(msg=ID_AND_LOCATION_BOTH_DEFINED, failed=True)
        elif "id" not in specified_drives and "location" not in specified_drives:
            self.module.exit_json(msg=ID_AND_LOCATION_BOTH_NOT_DEFINED, failed=True)
        drives_count = len(specified_drives.get("location")) if specified_drives.get("location") is not None else len(specified_drives.get("id"))
        return self.raid_std_validation(specified_volume.get("span_length"),
                                        specified_volume.get("span_depth"),
                                        specified_volume.get("volume_type"),
                                        drives_count)

    def raid_std_validation(self, span_length, span_depth, volume_type, pd_count):
        """Check span_length/span_depth/drive count against per-RAID-level
        rules; returns True when valid, exits the module otherwise."""
        raid_std = {
            "RAID 0": {'pd_slots': range(1, 2), 'span_length': 1, 'checks': operator.ge, 'span_depth': 1},
            "RAID 1": {'pd_slots': range(1, 3), 'span_length': 2, 'checks': operator.eq, 'span_depth': 1},
            "RAID 5": {'pd_slots': range(1, 4), 'span_length': 3, 'checks': operator.ge, 'span_depth': 1},
            "RAID 6": {'pd_slots': range(1, 5), 'span_length': 4, 'checks': operator.ge, 'span_depth': 1},
            "RAID 10": {'pd_slots': range(1, 5), 'span_length': 2, 'checks': operator.ge, 'span_depth': 2},
            "RAID 50": {'pd_slots': range(1, 7), 'span_length': 3, 'checks': operator.ge, 'span_depth': 2},
            "RAID 60": {'pd_slots': range(1, 9), 'span_length': 4, 'checks': operator.ge, 'span_depth': 2}
        }
        raid_info = raid_std.get(volume_type)
        if not raid_info.get('checks')(span_length, raid_info.get('span_length')):
            self.module.exit_json(msg=INVALID_VALUE_MSG.format(parameter="span_length"), failed=True)
        # Non-spanned levels need an exact span_depth; spanned levels need
        # at least the minimum depth.
        if volume_type in ["RAID 0", "RAID 1", "RAID 5", "RAID 6"] and operator.ne(span_depth, raid_info.get('span_depth')):
            self.module.exit_json(msg=INVALID_VALUE_MSG.format(parameter="span_depth"), failed=True)
        if volume_type in ["RAID 10", "RAID 50", "RAID 60"] and operator.lt(span_depth, raid_info.get('span_depth')):
            self.module.exit_json(msg=INVALID_VALUE_MSG.format(parameter="span_depth"), failed=True)
        if not operator.eq(pd_count, span_depth * span_length):
            self.module.exit_json(msg=INVALID_VALUE_MSG.format(parameter="drives"), failed=True)
        return True
+
+
class StorageCreate(StorageValidation):
    """Implements state=create: validates input, selects physical disks,
    builds the SCP payload, and triggers the import job."""

    def disk_slot_location_to_id_conversion(self, each_volume):
        """Translate ``drives.location`` slot numbers into disk FQDDs.

        Slots are matched against the first number embedded in each drive
        key of the controller inventory; unknown slots are dropped.
        """
        drives = {}
        if "location" in each_volume['drives']:
            regex_pattern = r"\d+"
            physical_disk = self.idrac_data["Controllers"][self.controller_id]["Drives"]
            slot_id_mapping = {int(re.search(regex_pattern, key).group()): key for key in physical_disk.keys()}
            drives['id'] = [slot_id_mapping.get(each_pd) for each_pd in each_volume['drives']['location']
                            if slot_id_mapping.get(each_pd)]
        elif "id" in each_volume['drives']:
            drives['id'] = each_volume['drives']['id']
        return drives

    def perform_intersection_on_disk(self, each_volume, healthy_disk, available_disk,
                                     media_type_supported_disk, protocol_supported_disk):
        """Intersect the candidate-disk sets and return a sorted list.

        Availability (RaidStatus) is only considered on firmware >= 3.00;
        media_type/protocol filters apply only when the user set them.
        """
        filtered_disk = healthy_disk
        firm_ver = get_idrac_firmware_version(self.idrac)
        if firm_ver >= "3.00":
            filtered_disk = filtered_disk.intersection(available_disk)
        if filtered_disk and each_volume.get('media_type'):
            filtered_disk = filtered_disk.intersection(media_type_supported_disk)
        if filtered_disk and each_volume.get('protocol'):
            filtered_disk = filtered_disk.intersection(protocol_supported_disk)
        return sorted(list(filtered_disk))

    def filter_disk(self, each_volume):
        """Classify the controller's drives into candidate sets (healthy,
        RAID-available, media-type match, protocol match) and intersect."""
        disk_dict = self.idrac_data["Controllers"][self.controller_id]["Drives"]
        healthy_disk = set()
        available_disk = set()
        media_type_supported_disk = set()
        protocol_supported_disk = set()
        raid_reset_config_value = self.module_ext_params.get('raid_reset_config')
        raid_status_list = ["Ready", "NonRAID"]
        if raid_reset_config_value == "true":
            # With raid_reset_config, disks already online may be reused.
            raid_status_list.append("Online")
        for key, value in disk_dict.items():
            if each_volume.get('media_type') and value.get('MediaType') == each_volume.get('media_type'):
                media_type_supported_disk.add(key)
            if each_volume.get('protocol') and value.get('Protocol') == each_volume.get('protocol'):
                protocol_supported_disk.add(key)
            status = value.get('Status', {}).get('Health', {})
            if status == "OK":
                healthy_disk.add(key)
            raid_status = value.get('Oem', {}).get('Dell', {}).get('DellPhysicalDisk', {}).get('RaidStatus', {})
            if raid_status in raid_status_list:
                available_disk.add(key)
        return self.perform_intersection_on_disk(each_volume, healthy_disk, available_disk,
                                                 media_type_supported_disk, protocol_supported_disk)

    def updating_drives_module_input_when_given(self, each_volume, filter_disk_output):
        """Return only the user-specified drive ids that survived filtering."""
        updated_disk_id_list = []
        if 'id' in each_volume['drives']:
            for each_pd in each_volume['drives']['id']:
                if each_pd in filter_disk_output:
                    updated_disk_id_list.append(each_pd)
        return updated_disk_id_list

    def updating_volume_module_input_for_hotspare(self, each_volume, filter_disk_output, reserved_pd, drives_exists_in_id):
        """Pick dedicated hot-spare disks from the filtered pool, skipping
        disks already reserved for data or named by the user."""
        tmp_list = []
        if 'number_dedicated_hot_spare' in each_volume and each_volume['number_dedicated_hot_spare'] > 0:
            for each_pd in filter_disk_output:
                if each_pd not in reserved_pd and each_pd not in drives_exists_in_id:
                    tmp_list.append(each_pd)
                    if len(tmp_list) == each_volume['number_dedicated_hot_spare']:
                        break
        return tmp_list

    def updating_volume_module_input(self, drives_exists_in_id):
        """Normalise every volume entry in module_ext_params.

        Converts stripe_size/capacity units, resolves drive ids and hot
        spares against the filtered disk pool, and exits early in check
        mode once changes are known to be required.
        """
        volumes = self.module_ext_params.get('volumes', [])
        reserved_pd = []
        for each in volumes:
            required_pd = int(each['span_depth']) * int(each['span_length'])
            filtered_disk = self.filter_disk(each)
            # stripe_size arrives in bytes and is divided into 512-byte
            # units for the SCP StripeSize attribute — TODO confirm units.
            if 'stripe_size' in each:
                each['stripe_size'] = int(each['stripe_size'] / 512)

            # capacity (GB) converted to a byte count string (1024**3).
            if each.get('capacity') is not None:
                each['capacity'] = str(int(float(each['capacity']) * 1073741824))

            # No user-supplied volumes: replace the placeholder ids with the
            # first sufficient disks from the filtered pool.
            if self.module.params.get('volumes') is None:
                each['drives']['id'] = filtered_disk[:required_pd]

            if 'drives' in each:
                drives_id_list = self.updating_drives_module_input_when_given(each, filtered_disk)
                reserved_pd += drives_id_list
                each['drives']['id'] = drives_id_list

            if 'number_dedicated_hot_spare' in each:
                hotspare_disk_list = self.updating_volume_module_input_for_hotspare(each, filtered_disk, reserved_pd,
                                                                                    drives_exists_in_id)
                reserved_pd += hotspare_disk_list
                each['dedicated_hot_spare'] = hotspare_disk_list
            self.validate_enough_drives_available(each)
        if self.module.check_mode:
            self.module.exit_json(msg=CHANGES_FOUND, changed=True)
        self.module_ext_params['volumes'] = volumes

    def validate_enough_drives_available(self, each_volume):
        """Exit when the filtered pool cannot satisfy the data-disk or
        hot-spare requirement (reports 'no changes' in check mode)."""
        controller_id = self.module_ext_params.get('controller_id')
        required_pd = each_volume['span_depth'] * each_volume['span_length']
        drives_available = each_volume['drives']['id']
        dedicated_hot_spare_required = int(each_volume['number_dedicated_hot_spare'])
        dedicated_hot_spare_available = len(each_volume['dedicated_hot_spare'])
        changed, failed = False, False
        if (required_pd > len(drives_available) or dedicated_hot_spare_required != dedicated_hot_spare_available):
            if not self.module.check_mode:
                msg, failed = NOT_ENOUGH_DRIVES.format(controller_id=controller_id), True
            else:
                msg, changed = CHANGES_NOT_FOUND, False
            self.module.exit_json(msg=msg, changed=changed, failed=failed)

    def validate(self):
        """Run all create-time validation and input normalisation."""
        # Validate upper layer input
        self.validate_controller_exists()
        self.validate_job_wait_negative_values()
        # Standard RAID validation for each volume entry
        drives_exists_in_id = []
        for each_volume in self.module_ext_params.get('volumes', []):
            # Validating for negative values
            self.validate_negative_values_for_volume_params(each_volume)
            self.validate_volume_drives(each_volume)
            if 'location' in each_volume['drives'] and each_volume['drives']['location']:
                each_volume['drives'] = self.disk_slot_location_to_id_conversion(each_volume)
            drives_exists_in_id += each_volume['drives']['id']
        # Extending volume module input in module_ext_params for drive ids and hot spares
        self.updating_volume_module_input(drives_exists_in_id)

    def execute(self):
        """Validate, build the SCP payload, trigger the import job, and
        return the tracked job details."""
        self.validate()
        job_dict = {}
        # Existing volume names -> FQDDs; count numbers the new disks.
        name_id_mapping = {value.get('Name'): key for key, value in self.idrac_data["Controllers"][self.controller_id]["Volumes"].items()}
        parent_payload = """<SystemConfiguration>{0}</SystemConfiguration>"""
        payload = self.constuct_payload(name_id_mapping)
        parent_payload = parent_payload.format(payload)
        resp = self.idrac.import_scp(import_buffer=parent_payload, target="RAID", job_wait=False)
        job_dict = self.wait_for_job_completion(resp)
        return job_dict
+
+
class StorageDelete(StorageValidation):
    """Implements state=delete: removes the named virtual disks via an SCP
    import with RAIDaction=Delete."""

    def check_even_single_given_volume_exists(self, volume_name_input_list):
        """Return True as soon as any requested name matches an existing
        virtual disk on any controller."""
        for controller_detail in self.idrac_data.get('Controllers').values():
            for volume_detail in controller_detail.get('Volumes').values():
                if volume_detail.get('Name') in volume_name_input_list:
                    return True

    def validate_volume_exists_in_server(self, volume_name_input_list):
        """Exit when none of the requested names exist; in check mode exit
        with 'changes found' when at least one does."""
        if self.check_even_single_given_volume_exists(volume_name_input_list):
            if not self.module.check_mode:
                return
            self.module.exit_json(msg=CHANGES_FOUND, failed=False, changed=True)
        self.module.exit_json(msg=VOLUME_NOT_FOUND, failed=True, changed=False)

    def validate(self):
        """Validate delete-time input: job-wait values and that every
        volume entry carries a name."""
        self.validate_job_wait_negative_values()
        volumes = self.module.params.get('volumes')
        if not volumes or any("name" not in each for each in volumes):
            self.module.exit_json(msg=VOLUME_NAME_REQUIRED_FOR_DELETE, failed=True)

    def construct_payload_for_delete(self, cntrl_id_vd_id_mapping):
        """Build the SCP XML that marks every mapped virtual disk with
        RAIDaction=Delete, grouped per controller."""
        controller_sections = []
        for cntrl_id, vd_ids in cntrl_id_vd_id_mapping.items():
            vd_section = "".join(
                xml_data_conversion({'RAIDaction': 'Delete'}, vd_id) for vd_id in vd_ids)
            controller_sections.append(xml_data_conversion({}, cntrl_id, vd_section))
        return "<SystemConfiguration>{0}</SystemConfiguration>".format("".join(controller_sections))

    def get_vd_id_based_on_controller_id_vd_name(self, user_volume_input_list):
        """Map each controller id to the FQDDs of its virtual disks whose
        names were requested for deletion."""
        cntrl_id_vd_id_mapping = {}
        for cntrl_id, detail in self.idrac_data.get('Controllers').items():
            matched_vd_ids = [vd_id for vd_id, volume in detail.get('Volumes').items()
                              if volume.get('Name') in user_volume_input_list]
            if matched_vd_ids:
                cntrl_id_vd_id_mapping[cntrl_id] = matched_vd_ids
        return cntrl_id_vd_id_mapping

    def execute(self):
        """Validate, build the delete payload, trigger the import job, and
        return the tracked job details."""
        self.validate()
        requested_names = {each.get('name') for each in self.module.params.get('volumes')}
        self.validate_volume_exists_in_server(requested_names)
        cntrl_id_vd_id_mapping = self.get_vd_id_based_on_controller_id_vd_name(requested_names)
        scp_payload = self.construct_payload_for_delete(cntrl_id_vd_id_mapping)
        resp = self.idrac.import_scp(import_buffer=scp_payload, target="RAID", job_wait=False)
        return self.wait_for_job_completion(resp)
+ return job_dict
+
+
class StorageView(StorageData):
    """Implements state=view: returns the condensed storage inventory,
    optionally narrowed to one controller or one virtual disk."""

    def __init__(self, idrac, module):
        super().__init__(idrac, module)

    def _exit_view_failure(self, message):
        # Every view failure exits with the same envelope.
        self.module.exit_json(msg=VIEW_OPERATION_FAILED,
                              storage_status={"Message": message, "Status": FAILED_STATUS},
                              failed=True)

    def execute(self):
        """Fetch the storage view and narrow it per controller_id/volume_id;
        returns ``{"Message": <data>, "Status": <status>}``."""
        storage_data = self.fetch_storage_data()
        controller_id = self.module.params.get("controller_id")
        volume_id = self.module.params.get("volume_id")
        status = SUCCESS_STATUS
        if volume_id:
            status, storage_data = self.process_volume_id(volume_id, controller_id, storage_data)
        elif controller_id:
            status, storage_data = self.process_controller_id(controller_id, storage_data)
        return {"Message": storage_data, "Status": status}

    def process_volume_id(self, volume_id, controller_id, storage_data):
        """Narrow the view to the virtual disks of one controller after
        confirming *volume_id* exists there; exits on any lookup failure."""
        if not controller_id:
            self._exit_view_failure(VIEW_OPERATION_CONTROLLER_NOT_SPECIFIED)
        ctrl_data = storage_data["Controller"].get(controller_id)
        if not ctrl_data:
            self._exit_view_failure(VIEW_CONTROLLER_DETAILS_NOT_FOUND.format(controller_id=controller_id))
        virtual_disk = ctrl_data.get("VirtualDisk")
        if not virtual_disk or volume_id not in virtual_disk:
            self._exit_view_failure(
                VIEW_VIRTUAL_DISK_DETAILS_NOT_FOUND.format(volume_id=volume_id, controller_id=controller_id))
        # Replace the full controller tree with just the virtual-disk view.
        storage_data[controller_id] = {"VirtualDisk": virtual_disk}
        del storage_data["Controller"]
        return SUCCESS_STATUS, storage_data

    def process_controller_id(self, controller_id, storage_data):
        """Narrow the view to one controller; exits when it is unknown."""
        ctrl_data = storage_data["Controller"].get(controller_id)
        if not ctrl_data:
            self._exit_view_failure(VIEW_CONTROLLER_DETAILS_NOT_FOUND.format(controller_id=controller_id))
        storage_data[controller_id] = ctrl_data
        del storage_data["Controller"]
        return SUCCESS_STATUS, storage_data
+
+
def main():
    """Module entry point: build the argument spec, dispatch on state, and
    translate exceptions into Ansible exit payloads."""
    argument_spec = {
        "state": {"choices": ['create', 'delete', 'view'], "default": 'view'},
        "volume_id": {"type": 'str'},
        "volumes": {"type": 'list', "elements": 'dict'},
        "span_depth": {"type": 'int', "default": 1},
        "span_length": {"type": 'int', "default": 1},
        "number_dedicated_hot_spare": {"type": 'int', "default": 0},
        "volume_type": {"choices": ['RAID 0', 'RAID 1', 'RAID 5', 'RAID 6', 'RAID 10', 'RAID 50', 'RAID 60'],
                        "default": 'RAID 0'},
        "disk_cache_policy": {"choices": ["Default", "Enabled", "Disabled"],
                              "default": "Default"},
        "write_cache_policy": {"choices": ["WriteThrough", "WriteBack", "WriteBackForce"],
                               "default": "WriteThrough"},
        "read_cache_policy": {"choices": ["NoReadAhead", "ReadAhead", "AdaptiveReadAhead"],
                              "default": "NoReadAhead"},
        "stripe_size": {"type": 'int', "default": 64 * 1024},
        "capacity": {"type": 'float'},
        "controller_id": {"type": 'str'},
        "media_type": {"choices": ['HDD', 'SSD']},
        "protocol": {"choices": ['SAS', 'SATA', 'PCIE']},
        "raid_reset_config": {"choices": ['true', 'false'], "default": 'false'},
        "raid_init_operation": {"choices": ['None', 'Fast']},
        "job_wait": {"type": "bool", "default": True},
        "job_wait_timeout": {"type": "int", "default": 900}
    }
    argument_spec.update(idrac_auth_params)
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    try:
        with iDRACRedfishAPI(module.params) as idrac:
            handlers = {
                'create': StorageCreate,
                'view': StorageView,
                'delete': StorageDelete,
            }
            state = module.params['state']
            handler_class = handlers.get(state)
            output = handler_class(idrac, module).execute()
            module.exit_json(msg=SUCCESSFUL_OPERATION_MSG.format(operation=state),
                             changed=state in ('create', 'delete'),
                             storage_status=output)
    except HTTPError as err:
        import json
        module.exit_json(msg=str(err), error_info=json.load(err), failed=True)
    except URLError as err:
        module.exit_json(msg=str(err), unreachable=True)
    except (SSLValidationError, ConnectionError, TypeError, ValueError, OSError) as err:
        module.exit_json(msg=str(err), failed=True)


if __name__ == '__main__':
    main()
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_console_preferences.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_console_preferences.py
index 65b1ae271..760546f02 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_console_preferences.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_console_preferences.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.1.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -130,12 +130,13 @@ options:
description: The frequency of the PowerManager extension data maintenance and purging.
type: int
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
notes:
- This module supports C(check_mode).
author:
- Sachin Apagundi(@sachin-apa)
- Husniya Hameed (@husniya-hameed)
+ - ShivamSh3 (@ShivamSh3)
'''
EXAMPLES = r'''
@@ -656,13 +657,13 @@ def main():
for cp in resp_data:
cp_data = strip_substr_dict(cp)
cp_list.append(cp_data)
- module.exit_json(msg=SUCCESS_MSG, console_preferences=cp_list)
+ module.exit_json(msg=SUCCESS_MSG, console_preferences=cp_list, changed=True)
except HTTPError as err:
- module.fail_json(msg=str(err), error_info=json.load(err))
+ module.exit_json(msg=str(err), error_info=json.load(err), failed=True)
except URLError as err:
module.exit_json(msg=str(err), unreachable=True)
except (IOError, ValueError, SSLError, TypeError, ConnectionError, AttributeError, IndexError, KeyError, OSError) as err:
- module.fail_json(msg=str(err), error_info=json.load(err))
+ module.exit_json(msg=str(err), failed=True)
if __name__ == '__main__':
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_local_access_configuration.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_local_access_configuration.py
index 7de50f0fb..c6245a6de 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_local_access_configuration.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_local_access_configuration.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.1.0
-# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.1.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -130,7 +130,7 @@ options:
- ja to set Japanese language.
- zh to set Chinese language.
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Felix Stephen (@felixs88)"
- "Shivam Sharma (@ShivamSh3)"
@@ -221,9 +221,9 @@ location_details:
"LcdCustomString": "LCD Text",
"LcdLanguage": "en",
"LcdOverridePin": "",
- "LcdPinLength": null,
+ "LcdPinLength": 6,
"LcdPresence": "Present",
- "LedPresence": null,
+ "LedPresence": "Absent",
"QuickSync": {
"EnableInactivityTimeout": true,
"EnableQuickSyncWifi": false,
@@ -380,6 +380,8 @@ def check_mode_validation(module, loc_resp):
payload["QuickSync"]["QuickSyncHardware"] = loc_resp["QuickSync"]["QuickSyncHardware"]
payload["SettingType"] = "LocalAccessConfiguration"
payload["LcdPresence"] = loc_resp["LcdPresence"]
+ payload["LcdPinLength"] = loc_resp["LcdPinLength"]
+ payload["LedPresence"] = loc_resp["LedPresence"]
return payload
@@ -476,7 +478,7 @@ def main():
except URLError as err:
module.exit_json(msg=str(err), unreachable=True)
except (IOError, ValueError, SSLError, TypeError, ConnectionError, AttributeError, IndexError, KeyError, OSError) as err:
- module.fail_json(msg=str(err))
+ module.exit_json(msg=str(err), failed=True)
if __name__ == '__main__':
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_quick_deploy.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_quick_deploy.py
index f12cf7078..7d45bf7e9 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_quick_deploy.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_quick_deploy.py
@@ -3,7 +3,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.7.0
+# Version 9.1.0
# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -127,6 +127,7 @@ requirements:
author:
- "Felix Stephen (@felixs88)"
- "Shivam Sharma (@ShivamSh3)"
+ - "Kritika Bhateja (@Kritika-Bhateja-03)"
notes:
- Run this module from a system that has direct access to OpenManage Enterprise Modular.
- This module supports C(check_mode).
@@ -395,7 +396,7 @@ def ip_address_field(module, field, deploy_options, slot=False):
if field_value is not None:
valid = validate_ip_address(module_params.get(val[0]), val[1])
if valid is False:
- module.fail_json(msg=IP_FAIL_MSG.format(field_value, val[0]))
+ module.exit_json(msg=IP_FAIL_MSG.format(field_value, val[0]), failed=True)
def check_domain_service(module, rest_obj):
@@ -404,7 +405,7 @@ def check_domain_service(module, rest_obj):
except HTTPError as err:
err_message = json.load(err)
if err_message["error"]["@Message.ExtendedInfo"][0]["MessageId"] == "CGEN1006":
- module.fail_json(msg=DOMAIN_FAIL_MSG)
+ module.exit_json(msg=DOMAIN_FAIL_MSG, failed=True)
def get_ip_from_host(hostname):
@@ -431,7 +432,7 @@ def get_chassis_device(module, rest_obj):
key, value = ("Id", data["DeviceId"])
break
else:
- module.fail_json(msg=FETCH_FAIL_MSG)
+ module.exit_json(msg=FETCH_FAIL_MSG, failed=True)
return key, value
@@ -469,12 +470,10 @@ def check_mode_validation(module, deploy_data):
"SlotIPV6Address": each.get("slot_ipv6_address"), "VlanId": each.get("vlan_id")}
if each.get("vlan_id") is not None:
req_slot_1.update({"VlanId": str(each.get("vlan_id"))})
- else:
- req_slot_1.update({"VlanId": ""})
req_filter_slot = dict([(k, v) for k, v in req_slot_1.items() if v is not None])
- exist_slot_1 = {"SlotId": exist_filter_slot[0]["SlotId"],
- "SlotIPV4Address": exist_filter_slot[0]["SlotIPV4Address"],
- "SlotIPV6Address": exist_filter_slot[0]["SlotIPV6Address"]}
+ exist_slot_1 = {"SlotId": exist_filter_slot[0].get("SlotId"),
+ "SlotIPV4Address": exist_filter_slot[0].get("SlotIPV4Address"),
+ "SlotIPV6Address": exist_filter_slot[0].get("SlotIPV6Address")}
if "VlanId" in exist_filter_slot[0]:
exist_slot_1.update({"VlanId": exist_filter_slot[0]["VlanId"]})
else:
@@ -487,7 +486,7 @@ def check_mode_validation(module, deploy_data):
else:
invalid_slot.append(each["slot_id"])
if invalid_slot:
- module.fail_json(msg=INVALID_SLOT_MSG.format(", ".join(map(str, invalid_slot))))
+ module.exit_json(msg=INVALID_SLOT_MSG.format(", ".join(map(str, invalid_slot))), failed=True)
if module.check_mode and any(diff_changes) is True:
module.exit_json(msg=CHANGES_FOUND, changed=True, quick_deploy_settings=deploy_data)
elif (module.check_mode and any(diff_changes) is False) or \
@@ -597,23 +596,25 @@ def get_device_details(rest_obj, module):
resp_data = resp.json_data.get("value")
rename_key = "id" if key == "Id" else "service tag"
if not resp_data:
- module.fail_json(msg=DEVICE_FAIL_MSG.format(rename_key, value))
+ module.exit_json(msg=DEVICE_FAIL_MSG.format(rename_key, value), failed=True)
if key == "DeviceServiceTag" and resp_data[0]["DeviceServiceTag"] == tag:
device_id = resp_data[0]["Id"]
elif key == "Id" and resp_data[0]["Id"] == device_id:
device_id = resp_data[0]["Id"]
else:
- module.fail_json(msg=DEVICE_FAIL_MSG.format(rename_key, value))
+ module.exit_json(msg=DEVICE_FAIL_MSG.format(rename_key, value), failed=True)
settings_type, settings_key = "IOMQuickDeploy", "IOM Quick Deploy"
if module.params["setting_type"] == "ServerQuickDeploy":
settings_type, settings_key = "ServerQuickDeploy", "Server Quick Deploy"
try:
deploy_resp = rest_obj.invoke_request("GET", QUICK_DEPLOY_API.format(device_id, settings_type))
except HTTPError as err:
- err_message = json.load(err)
- error_msg = err_message.get('error', {}).get('@Message.ExtendedInfo')
+ if err.status == 404:
+ module.exit_json(msg=DEVICE_FAIL_MSG.format(rename_key, value), failed=True)
+ err_message = json.load(err).get("error")
+ error_msg = err_message.get('@Message.ExtendedInfo')
if error_msg and error_msg[0].get("MessageId") == "CGEN1004":
- module.fail_json(msg=QUICK_DEPLOY_FAIL_MSG.format(settings_key))
+ module.exit_json(msg=QUICK_DEPLOY_FAIL_MSG.format(settings_key), failed=True)
else:
resp_data = rest_obj.strip_substr_dict(deploy_resp.json_data)
payload, slot_payload = check_mode_validation(module, resp_data)
@@ -621,7 +622,7 @@ def get_device_details(rest_obj, module):
if module.params["job_wait"]:
job_failed, job_msg = rest_obj.job_tracking(job_id, job_wait_sec=module.params["job_wait_timeout"])
if job_failed is True:
- module.fail_json(msg=FAIL_MSG)
+ module.exit_json(msg=FAIL_MSG, failed=True)
job_success_resp = rest_obj.invoke_request("GET", QUICK_DEPLOY_API.format(device_id, settings_type))
job_success_data = rest_obj.strip_substr_dict(job_success_resp.json_data)
return job_id, job_success_data
@@ -667,7 +668,7 @@ def main():
mutually_exclusive=[('device_id', 'device_service_tag')],
supports_check_mode=True,)
if module.params["quick_deploy_options"] is None:
- module.fail_json(msg="missing required arguments: quick_deploy_options")
+ module.exit_json(msg="missing required arguments: quick_deploy_options", failed=True)
fields = [("ipv4_subnet_mask", "IPV4"), ("ipv4_gateway", "IPV4"), ("ipv6_gateway", "IPV6")]
ip_address_field(module, fields, module.params["quick_deploy_options"], slot=False)
slot_options = module.params["quick_deploy_options"].get("slots")
@@ -683,12 +684,12 @@ def main():
module.exit_json(msg=SUCCESS_MSG, job_id=job_id, quick_deploy_settings=data, changed=True)
module.exit_json(msg=JOB_MSG, job_id=job_id)
except HTTPError as err:
- module.fail_json(msg=str(err), error_info=json.load(err))
+ module.exit_json(msg=str(err), error_info=json.load(err), failed=True)
except URLError as err:
module.exit_json(msg=str(err), unreachable=True)
except (IOError, ValueError, SSLError, TypeError, ConnectionError,
AttributeError, IndexError, KeyError, OSError) as err:
- module.fail_json(msg=str(err))
+ module.exit_json(msg=str(err), failed=True)
if __name__ == '__main__':
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_devices.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_devices.py
index 876e5b235..2dc7b625d 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_devices.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_devices.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 6.1.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.1.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -19,8 +19,6 @@ module: ome_devices
short_description: Perform device-specific operations on target devices
description: Perform device-specific operations such as refresh inventory, clear iDRAC job queue, and reset iDRAC from OpenManage Enterprise.
version_added: 6.1.0
-author:
- - Jagadeesh N V(@jagadeeshnv)
extends_documentation_fragment:
- dellemc.openmanage.oment_auth_options
options:
@@ -77,7 +75,10 @@ options:
description: Optional description for the job.
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
+author:
+ - Jagadeesh N V(@jagadeeshnv)
+  - ShivamSh3 (@ShivamSh3)
notes:
- For C(idrac_reset), the job triggers only the iDRAC reset operation and does not track the complete reset cycle.
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
@@ -248,6 +249,7 @@ JOB_DESC = "The {0} task initiated from OpenManage Ansible Modules for devices w
APPLY_TRIGGERED = "Successfully initiated the device action job."
JOB_SCHEDULED = "The job is scheduled successfully."
SUCCESS_MSG = "The device operation is performed successfully."
+TIMEOUT_NEGATIVE_MSG = "The parameter `job_wait_timeout` value cannot be negative or zero."
all_device_types = [1000, 2000, 4000, 5000, 7000, 8000, 9001]
device_type_map = {"refresh_inventory": all_device_types, "reset_idrac": [1000], "clear_idrac_job_queue": [1000]}
@@ -419,6 +421,8 @@ def main():
supports_check_mode=True
)
try:
+ if module.params.get("job_wait") and module.params.get("job_wait_timeout") <= 0:
+ module.exit_json(msg=TIMEOUT_NEGATIVE_MSG, failed=True)
with RestOME(module.params, req_session=True) as rest_obj:
if module.params.get("state") == 'present':
valids, invalids = get_dev_ids(module, rest_obj,
@@ -432,12 +436,12 @@ def main():
module.exit_json(msg=NO_CHANGES_MSG)
delete_devices(module, rest_obj, valids)
except HTTPError as err:
- module.fail_json(msg=str(err), error_info=json.load(err))
+ module.exit_json(msg=str(err), error_info=json.load(err), failed=True)
except URLError as err:
module.exit_json(msg=str(err), unreachable=True)
except (IOError, ValueError, SSLError, TypeError, ConnectionError, AttributeError, IndexError, KeyError,
OSError) as err:
- module.fail_json(msg=str(err))
+ module.exit_json(msg=str(err), failed=True)
if __name__ == '__main__':
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/redfish_storage_volume.py b/ansible_collections/dellemc/openmanage/plugins/modules/redfish_storage_volume.py
index d8f0c5503..3de200a8f 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/redfish_storage_volume.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/redfish_storage_volume.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.5.0
-# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.1.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -182,11 +182,13 @@ requirements:
author:
- "Sajna Shetty(@Sajna-Shetty)"
- "Kritika Bhateja(@Kritika-Bhateja-03)"
+ - "Shivam Sharma(@ShivamSh3)"
notes:
- Run this module from a system that has direct access to Redfish APIs.
- This module supports C(check_mode).
- This module always reports changes when I(name) and I(volume_id) are not specified.
Either I(name) or I(volume_id) is required to support C(check_mode).
+  - This module does not support the create operation of RAID6 and RAID60 storage volumes on iDRAC8.
- This module supports IPv4 and IPv6 addresses.
'''
@@ -374,6 +376,7 @@ import copy
from ssl import SSLError
from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, redfish_auth_params
from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import MANAGER_JOB_ID_URI, wait_for_redfish_reboot_job, \
@@ -401,6 +404,9 @@ REBOOT_FAIL = "Failed to reboot the server."
CONTROLLER_NOT_EXIST_ERROR = "Specified Controller {controller_id} does not exist in the System."
TIMEOUT_NEGATIVE_OR_ZERO_MSG = "The parameter job_wait_timeout value cannot be negative or zero."
SYSTEM_ID = "System.Embedded.1"
+GET_IDRAC_FIRMWARE_VER_URI = "/redfish/v1/Managers/iDRAC.Embedded.1?$select=FirmwareVersion"
+ODATA_ID = "@odata.id"
+TARGET_OUT_OF_BAND = "Target out-of-band controller does not support storage feature using Redfish API."
volume_type_map = {"NonRedundant": "RAID0",
"Mirrored": "RAID1",
"StripedWithParity": "RAID5",
@@ -414,26 +420,26 @@ def fetch_storage_resource(module, session_obj):
system_resp = session_obj.invoke_request("GET", system_uri)
system_members = system_resp.json_data.get("Members")
if system_members:
- system_id_res = system_members[0]["@odata.id"]
- SYSTEM_ID = system_id_res.split('/')[-1]
+ system_id_res = system_members[0][ODATA_ID]
+ _SYSTEM_ID = system_id_res.split('/')[-1]
system_id_res_resp = session_obj.invoke_request("GET", system_id_res)
system_id_res_data = system_id_res_resp.json_data.get("Storage")
if system_id_res_data:
- storage_collection_map.update({"storage_base_uri": system_id_res_data["@odata.id"]})
+ storage_collection_map.update({"storage_base_uri": system_id_res_data[ODATA_ID]})
else:
- module.fail_json(msg="Target out-of-band controller does not support storage feature using Redfish API.")
+ module.fail_json(msg=TARGET_OUT_OF_BAND)
else:
- module.fail_json(msg="Target out-of-band controller does not support storage feature using Redfish API.")
+ module.fail_json(msg=TARGET_OUT_OF_BAND)
except HTTPError as err:
if err.code in [404, 405]:
- module.fail_json(msg="Target out-of-band controller does not support storage feature using Redfish API.",
+ module.fail_json(msg=TARGET_OUT_OF_BAND,
error_info=json.load(err))
raise err
except (URLError, SSLValidationError, ConnectionError, TypeError, ValueError) as err:
raise err
-def volume_payload(module):
+def volume_payload(module, greater_version):
params = module.params
drives = params.get("drives")
capacity_bytes = params.get("capacity_bytes")
@@ -448,8 +454,8 @@ def volume_payload(module):
capacity_bytes = int(capacity_bytes)
if drives:
storage_base_uri = storage_collection_map["storage_base_uri"]
- physical_disks = [{"@odata.id": DRIVES_URI.format(storage_base_uri=storage_base_uri,
- driver_id=drive_id)} for drive_id in drives]
+ physical_disks = [{ODATA_ID: DRIVES_URI.format(storage_base_uri=storage_base_uri,
+ driver_id=drive_id)} for drive_id in drives]
raid_mapper = {
"Name": params.get("name"),
"BlockSizeBytes": params.get("block_size_bytes"),
@@ -464,10 +470,15 @@ def volume_payload(module):
raid_payload.update({"Encrypted": encrypted})
if encryption_types:
raid_payload.update({"EncryptionTypes": [encryption_types]})
- if volume_type:
+ if volume_type and greater_version:
raid_payload.update({"RAIDType": volume_type_map.get(volume_type)})
- if raid_type:
+ if raid_type and greater_version:
raid_payload.update({"RAIDType": raid_type})
+ if volume_type and greater_version is False:
+ raid_payload.update({"VolumeType": volume_type})
+ if raid_type and greater_version is False:
+ raid_map = {value: key for key, value in volume_type_map.items()}
+ raid_payload.update({"VolumeType": raid_map.get(raid_type)})
if apply_time is not None:
raid_payload.update({"@Redfish.OperationApplyTime": apply_time})
return raid_payload
@@ -561,7 +572,7 @@ def perform_storage_volume_action(method, uri, session_obj, action, payload=None
raise err
-def check_mode_validation(module, session_obj, action, uri):
+def check_mode_validation(module, session_obj, action, uri, greater_version):
volume_id = module.params.get('volume_id')
name = module.params.get("name")
block_size_bytes = module.params.get("block_size_bytes")
@@ -575,49 +586,86 @@ def check_mode_validation(module, session_obj, action, uri):
if name is None and volume_id is None and module.check_mode:
module.exit_json(msg=CHANGES_FOUND, changed=True)
if action == "create" and name is not None:
- volume_resp = session_obj.invoke_request("GET", uri)
- volume_resp_data = volume_resp.json_data
- if volume_resp_data.get("Members@odata.count") == 0 and module.check_mode:
- module.exit_json(msg=CHANGES_FOUND, changed=True)
- elif 0 < volume_resp_data.get("Members@odata.count"):
- for mem in volume_resp_data.get("Members"):
- mem_resp = session_obj.invoke_request("GET", mem["@odata.id"])
- if mem_resp.json_data["Name"] == name:
- volume_id = mem_resp.json_data["Id"]
- break
- if name is not None and module.check_mode and volume_id is None:
- module.exit_json(msg=CHANGES_FOUND, changed=True)
+ volume_id = _create_name(module, session_obj, uri, name, volume_id)
if volume_id is not None:
- resp = session_obj.invoke_request("GET", SETTING_VOLUME_ID_URI.format(
- storage_base_uri=storage_collection_map["storage_base_uri"],
- volume_id=volume_id))
- resp_data = resp.json_data
+ _volume_id_check_mode(module, session_obj, greater_version, volume_id,
+ name, block_size_bytes, capacity_bytes, optimum_io_size_bytes,
+ encryption_types, encrypted, volume_type, raid_type, drives)
+ return None
+
+
+def _volume_id_check_mode(module, session_obj, greater_version, volume_id, name,
+ block_size_bytes, capacity_bytes, optimum_io_size_bytes,
+ encryption_types, encrypted, volume_type, raid_type, drives):
+ resp = session_obj.invoke_request("GET", SETTING_VOLUME_ID_URI.format(
+ storage_base_uri=storage_collection_map["storage_base_uri"],
+ volume_id=volume_id))
+ resp_data = resp.json_data
+ exist_value = _get_payload_for_version(greater_version, resp_data)
+ exit_value_filter = dict(
+ [(k, v) for k, v in exist_value.items() if v is not None])
+ cp_exist_value = copy.deepcopy(exit_value_filter)
+ req_value = get_request_value(greater_version, name, block_size_bytes, optimum_io_size_bytes, encryption_types, encrypted, volume_type, raid_type)
+ if capacity_bytes is not None:
+ req_value["CapacityBytes"] = int(capacity_bytes)
+ req_value_filter = dict([(k, v)
+ for k, v in req_value.items() if v is not None])
+ cp_exist_value.update(req_value_filter)
+ exist_drive, req_drive = [], []
+ if resp_data["Links"]:
+ exist_drive = [
+ disk[ODATA_ID].split("/")[-1] for disk in resp_data["Links"]["Drives"]]
+ if drives is not None:
+ req_drive = sorted(drives)
+ diff_changes = [bool(set(exit_value_filter.items()) ^ set(cp_exist_value.items())) or
+ bool(set(exist_drive) ^ set(req_drive))]
+ if module.check_mode and any(diff_changes) is True:
+ module.exit_json(msg=CHANGES_FOUND, changed=True)
+ elif (module.check_mode and any(diff_changes) is False) or \
+ (not module.check_mode and any(diff_changes) is False):
+ module.exit_json(msg=NO_CHANGES_FOUND)
+
+
+def get_request_value(greater_version, name, block_size_bytes, optimum_io_size_bytes, encryption_types, encrypted, volume_type, raid_type):
+ if greater_version:
+ req_value = {"Name": name, "BlockSizeBytes": block_size_bytes,
+ "Encrypted": encrypted, "OptimumIOSizeBytes": optimum_io_size_bytes,
+ "RAIDType": raid_type, "EncryptionTypes": encryption_types}
+ else:
+ req_value = {"Name": name, "BlockSizeBytes": block_size_bytes,
+ "Encrypted": encrypted, "OptimumIOSizeBytes": optimum_io_size_bytes,
+ "VolumeType": volume_type, "EncryptionTypes": encryption_types}
+ return req_value
+
+
+def _get_payload_for_version(greater_version, resp_data):
+ if greater_version:
exist_value = {"Name": resp_data["Name"], "BlockSizeBytes": resp_data["BlockSizeBytes"],
"CapacityBytes": resp_data["CapacityBytes"], "Encrypted": resp_data["Encrypted"],
"EncryptionTypes": resp_data["EncryptionTypes"][0],
"OptimumIOSizeBytes": resp_data["OptimumIOSizeBytes"], "RAIDType": resp_data["RAIDType"]}
- exit_value_filter = dict([(k, v) for k, v in exist_value.items() if v is not None])
- cp_exist_value = copy.deepcopy(exit_value_filter)
- req_value = {"Name": name, "BlockSizeBytes": block_size_bytes,
- "Encrypted": encrypted, "OptimumIOSizeBytes": optimum_io_size_bytes,
- "RAIDType": raid_type, "EncryptionTypes": encryption_types}
- if capacity_bytes is not None:
- req_value["CapacityBytes"] = int(capacity_bytes)
- req_value_filter = dict([(k, v) for k, v in req_value.items() if v is not None])
- cp_exist_value.update(req_value_filter)
- exist_drive, req_drive = [], []
- if resp_data["Links"]:
- exist_drive = [disk["@odata.id"].split("/")[-1] for disk in resp_data["Links"]["Drives"]]
- if drives is not None:
- req_drive = sorted(drives)
- diff_changes = [bool(set(exit_value_filter.items()) ^ set(cp_exist_value.items())) or
- bool(set(exist_drive) ^ set(req_drive))]
- if module.check_mode and any(diff_changes) is True:
- module.exit_json(msg=CHANGES_FOUND, changed=True)
- elif (module.check_mode and any(diff_changes) is False) or \
- (not module.check_mode and any(diff_changes) is False):
- module.exit_json(msg=NO_CHANGES_FOUND)
- return None
+ else:
+ exist_value = {"Name": resp_data["Name"], "BlockSizeBytes": resp_data["BlockSizeBytes"],
+ "CapacityBytes": resp_data["CapacityBytes"], "Encrypted": resp_data["Encrypted"],
+ "EncryptionTypes": resp_data["EncryptionTypes"][0],
+ "OptimumIOSizeBytes": resp_data["OptimumIOSizeBytes"], "VolumeType": resp_data["VolumeType"]}
+ return exist_value
+
+
+def _create_name(module, session_obj, uri, name, volume_id):
+ volume_resp = session_obj.invoke_request("GET", uri)
+ volume_resp_data = volume_resp.json_data
+ if volume_resp_data.get("Members@odata.count") == 0 and module.check_mode:
+ module.exit_json(msg=CHANGES_FOUND, changed=True)
+ elif 0 < volume_resp_data.get("Members@odata.count"):
+ for mem in volume_resp_data.get("Members"):
+ mem_resp = session_obj.invoke_request("GET", mem[ODATA_ID])
+ if mem_resp.json_data["Name"] == name:
+ volume_id = mem_resp.json_data["Id"]
+ break
+ if name is not None and module.check_mode and volume_id is None:
+ module.exit_json(msg=CHANGES_FOUND, changed=True)
+ return volume_id
def check_raid_type_supported(module, session_obj):
@@ -638,7 +686,7 @@ def check_raid_type_supported(module, session_obj):
raise err
-def get_apply_time(module, session_obj, controller_id):
+def get_apply_time(module, session_obj, controller_id, greater_version):
"""
gets the apply time from user if given otherwise fetches from server
"""
@@ -646,7 +694,10 @@ def get_apply_time(module, session_obj, controller_id):
try:
uri = APPLY_TIME_INFO_API.format(storage_base_uri=storage_collection_map["storage_base_uri"], controller_id=controller_id)
resp = session_obj.invoke_request("GET", uri)
- supported_apply_time_values = resp.json_data['@Redfish.OperationApplyTimeSupport']['SupportedValues']
+ if greater_version:
+ supported_apply_time_values = resp.json_data['@Redfish.OperationApplyTimeSupport']['SupportedValues']
+ else:
+ return apply_time
if apply_time:
if apply_time not in supported_apply_time_values:
module.exit_json(msg=APPLY_TIME_NOT_SUPPORTED_MSG.format(apply_time=apply_time, supported_apply_time_values=supported_apply_time_values),
@@ -658,24 +709,25 @@ def get_apply_time(module, session_obj, controller_id):
raise err
-def check_apply_time_supported_and_reboot_required(module, session_obj, controller_id):
+def check_apply_time_supported_and_reboot_required(module, session_obj, controller_id, greater_version):
"""
checks whether the apply time is supported and reboot operation is required or not.
"""
- apply_time = get_apply_time(module, session_obj, controller_id)
+ apply_time = get_apply_time(module, session_obj, controller_id, greater_version)
reboot_server = module.params.get("reboot_server")
if reboot_server and apply_time == "OnReset":
return True
return False
-def perform_volume_create_modify(module, session_obj):
+def perform_volume_create_modify(module, session_obj, greater_version):
"""
perform volume creation and modification for state present
"""
specified_controller_id = module.params.get("controller_id")
volume_id = module.params.get("volume_id")
- check_raid_type_supported(module, session_obj)
+ if greater_version:
+ check_raid_type_supported(module, session_obj)
action, uri, method = None, None, None
if specified_controller_id is not None:
check_controller_id_exists(module, session_obj)
@@ -690,8 +742,8 @@ def perform_volume_create_modify(module, session_obj):
volume_id=volume_id)
method = "PATCH"
action = "modify"
- payload = volume_payload(module)
- check_mode_validation(module, session_obj, action, uri)
+ payload = volume_payload(module, greater_version)
+ check_mode_validation(module, session_obj, action, uri, greater_version)
if not payload:
module.fail_json(msg="Input options are not provided for the {0} volume task.".format(action))
return perform_storage_volume_action(method, uri, session_obj, action, payload)
@@ -742,7 +794,7 @@ def perform_volume_initialization(module, session_obj):
module.fail_json(msg="'volume_id' option is a required property for initializing a volume.")
-def configure_raid_operation(module, session_obj):
+def configure_raid_operation(module, session_obj, greater_version):
"""
configure raid action based on state and command input
"""
@@ -750,7 +802,7 @@ def configure_raid_operation(module, session_obj):
state = module_params.get("state")
command = module_params.get("command")
if state is not None and state == "present":
- return perform_volume_create_modify(module, session_obj)
+ return perform_volume_create_modify(module, session_obj, greater_version)
elif state is not None and state == "absent":
return perform_volume_deletion(module, session_obj)
elif command is not None and command == "initialize":
@@ -818,11 +870,11 @@ def perform_reboot(module, session_obj):
module.exit_json(msg=msg, job_status=job_data)
-def check_job_tracking_required(module, session_obj, reboot_required, controller_id):
+def check_job_tracking_required(module, session_obj, reboot_required, controller_id, greater_version):
job_wait = module.params.get("job_wait")
apply_time = None
if controller_id:
- apply_time = get_apply_time(module, session_obj, controller_id)
+ apply_time = get_apply_time(module, session_obj, controller_id, greater_version)
if job_wait:
if apply_time == "OnReset" and not reboot_required:
return False
@@ -855,6 +907,15 @@ def validate_negative_job_time_out(module):
module.exit_json(msg=TIMEOUT_NEGATIVE_OR_ZERO_MSG, failed=True)
+def is_fw_ver_greater(session_obj):
+ firm_version = session_obj.invoke_request('GET', GET_IDRAC_FIRMWARE_VER_URI)
+ version = firm_version.json_data.get('FirmwareVersion', '')
+ if LooseVersion(version) <= '3.0':
+ return False
+ else:
+ return True
+
+
def main():
specs = {
"state": {"type": "str", "required": False, "choices": ['present', 'absent']},
@@ -899,6 +960,7 @@ def main():
validate_inputs(module)
validate_negative_job_time_out(module)
with Redfish(module.params, req_session=True) as session_obj:
+ greater_version = is_fw_ver_greater(session_obj)
fetch_storage_resource(module, session_obj)
controller_id = module.params.get("controller_id")
volume_id = module.params.get("volume_id")
@@ -907,16 +969,16 @@ def main():
if controller_id:
uri = CONTROLLER_URI.format(storage_base_uri=storage_collection_map["storage_base_uri"], controller_id=controller_id)
resp = check_specified_identifier_exists_in_the_system(module, session_obj, uri, CONTROLLER_NOT_EXIST_ERROR.format(controller_id=controller_id))
- reboot_required = check_apply_time_supported_and_reboot_required(module, session_obj, controller_id)
- status_message = configure_raid_operation(module, session_obj)
+ reboot_required = check_apply_time_supported_and_reboot_required(module, session_obj, controller_id, greater_version)
+ status_message = configure_raid_operation(module, session_obj, greater_version)
if volume_id and reboot_server:
controller_id = volume_id.split(":")[-1]
uri = CONTROLLER_URI.format(storage_base_uri=storage_collection_map["storage_base_uri"], controller_id=controller_id)
resp = check_specified_identifier_exists_in_the_system(module, session_obj, uri, CONTROLLER_NOT_EXIST_ERROR.format(controller_id=controller_id))
- reboot_required = check_apply_time_supported_and_reboot_required(module, session_obj, controller_id)
+ reboot_required = check_apply_time_supported_and_reboot_required(module, session_obj, controller_id, greater_version)
if reboot_required:
perform_reboot(module, session_obj)
- job_tracking_required = check_job_tracking_required(module, session_obj, reboot_required, controller_id)
+ job_tracking_required = check_job_tracking_required(module, session_obj, reboot_required, controller_id, greater_version)
job_id = status_message.get("task_id")
job_url = MANAGER_JOB_ID_URI.format(job_id)
if job_tracking_required:
diff --git a/ansible_collections/dellemc/openmanage/requirements.txt b/ansible_collections/dellemc/openmanage/requirements.txt
index 30a428ace..d0f6b9867 100644
--- a/ansible_collections/dellemc/openmanage/requirements.txt
+++ b/ansible_collections/dellemc/openmanage/requirements.txt
@@ -1,3 +1,2 @@
omsdk
netaddr>=0.7.19
-jmespath
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/clear_pending_attributes/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/clear_pending_attributes/prepare.yml
index 46d74222a..a91b20b69 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/clear_pending_attributes/prepare.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/clear_pending_attributes/prepare.yml
@@ -14,13 +14,10 @@
/iDRAC.Embedded.1/Jobs?$expand=*($levels=1)"
- name: Fetch Bios Jobs Data
- when: idrac_bios_uri_data.json.Members | length > 0
+ loop: "{{ idrac_bios_uri_data.json.Members }}"
+ when: item.JobType == 'BIOSConfiguration' and item.JobState in ['Scheduled', 'Scheduling']
ansible.builtin.set_fact:
- idrac_bios_jobs_items: "{{ idrac_bios_uri_data.json.Members
- | json_query(query) }}"
- vars:
- query: "[?JobType=='BIOSConfiguration' && JobState=='Scheduled'
- || JobState=='Scheduling' ]"
+ idrac_bios_jobs_items: "{{ idrac_bios_jobs_items | default([]) + [item] }}"
no_log: true
- name: Block for creating a bios job as a pre-requisite
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window/converge.yml
index 44439ab07..622c48964 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window/converge.yml
@@ -49,7 +49,7 @@
ansible.builtin.assert:
that: "idrac_bios_out.attributes.status_msg is search('The
maintenance time must be post-fixed with local offset
- to -06:00.')"
+ to')"
- name: Block to update attributes with maintenance window
with invalid duration
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true/converge.yml
index 7db461ce1..4aa8e7661 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true/converge.yml
@@ -55,6 +55,11 @@
delay: "{{ delay_count }}"
no_log: true
+ - name: Waiting for the iDRAC to be available
+ ansible.builtin.wait_for:
+ timeout: 60
+ tags: molecule-idempotence-notest
+
- name: Pre-requisite - Making sure enabled is false on first boot_option_reference
check_mode: false
ansible.builtin.import_role:
@@ -65,20 +70,6 @@
enabled: false
tags: molecule-idempotence-notest
- - name: Checking for LCStatus after running pre-requisite
- ansible.builtin.uri:
- <<: *uri_input
- url: "{{ lc_uri }}"
- method: POST
- body: {}
- register: lc_status_result
- check_mode: false
- when: idrac_boot_out.changed # noqa: no-handler
- until: lc_status_result.json.LCStatus == "Ready"
- retries: "{{ retry_count }}"
- delay: "{{ delay_count }}"
- no_log: true
-
- name: TC-115424 - Validate boot_options using boot_option_reference and default enabled
ansible.builtin.include_role:
name: "idrac_boot"
@@ -101,6 +92,11 @@
delay: "{{ delay_count }}"
no_log: true
+ - name: Waiting for the iDRAC to be available
+ ansible.builtin.wait_for:
+ timeout: 60
+ tags: molecule-idempotence-notest
+
- name: Asserting TC-115424 in check mode
ansible.builtin.assert:
that: idrac_boot_out.msg == "Changes found to be applied."
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false/converge.yml
index 9bf8ed1e4..342873325 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false/converge.yml
@@ -73,7 +73,6 @@
body: {}
register: lc_status_result
check_mode: false
- when: idrac_boot_out.changed # noqa: no-handler
until: lc_status_result.json.LCStatus == "Ready"
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart/converge.yml
index 92d1958d7..851a4cc73 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart/converge.yml
@@ -29,23 +29,6 @@
timeout: "{{ https_timeout }}"
no_log: true
- - name: Fetching boot order from iDRAC
- ansible.builtin.uri:
- <<: *uri_input
- url: "{{ boot_order_uri }}"
- method: GET
- register: result_data
- check_mode: false
- no_log: true
-
- - name: Extracing BootOrder from output
- ansible.builtin.set_fact:
- data: "{{ result_data.json.Boot.BootOrder | default([]) }}"
-
- - name: Reversing the boot order
- ansible.builtin.set_fact:
- reverse_boot_order: "{{ data | reverse | list }}"
-
- name: Checking for LCStatus before running pre-requisite
ansible.builtin.uri:
<<: *uri_input
@@ -58,6 +41,12 @@
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
no_log: true
+ tags: molecule-idempotence-notest
+
+ - name: Waiting for the iDRAC to be available
+ ansible.builtin.wait_for:
+ timeout: 60
+ tags: molecule-idempotence-notest
- name: Pre-requisite - Making sure boot mode is legacy
check_mode: false
@@ -67,6 +56,24 @@
boot_source_override_mode: legacy
tags: molecule-idempotence-notest
+ - name: Fetching boot order from iDRAC
+ ansible.builtin.uri:
+ <<: *uri_input
+ url: "{{ boot_order_uri }}"
+ method: GET
+ register: result_data
+ check_mode: false
+ no_log: true
+
+  - name: Extracting BootOrder from output
+ ansible.builtin.set_fact:
+ data: "{{ result_data.json.Boot.BootOrder | default([]) }}"
+
+ - name: Reversing the boot order
+ ansible.builtin.set_fact:
+      reverse_boot_order: "{{ data | reverse | list }}"
+ tags: molecule-idempotence-notest
+
- name: Checking for LCStatus after running pre-requisite
ansible.builtin.uri:
<<: *uri_input
@@ -80,12 +87,18 @@
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
no_log: true
+ tags: molecule-idempotence-notest
+
+ - name: Waiting for the iDRAC to be available
+ ansible.builtin.wait_for:
+ timeout: 60
+ tags: molecule-idempotence-notest
- name: TC-115427 - Validate boot_order with legacy mode with force_restart
ansible.builtin.include_role:
name: "idrac_boot"
vars:
- boot_order: "{{ reverse_boot_order }}"
+ boot_order: "{{ reverse_boot_order | default(data) }}"
reset_type: force_restart
- name: Checking for LCStatus after performing operation
@@ -101,6 +114,12 @@
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
no_log: true
+ tags: molecule-idempotence-notest
+
+ - name: Waiting for the iDRAC to be available
+ ansible.builtin.wait_for:
+ timeout: 60
+ tags: molecule-idempotence-notest
- name: Asserting TC-115427 in check mode
ansible.builtin.assert:
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart/converge.yml
index 58cd441fe..48d24128a 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart/converge.yml
@@ -29,23 +29,6 @@
timeout: "{{ https_timeout }}"
no_log: true
- - name: Fetching boot order from iDRAC
- ansible.builtin.uri:
- <<: *uri_input
- url: "{{ boot_order_uri }}"
- method: GET
- register: result_data
- check_mode: false
- no_log: true
-
- - name: Extracing BootOrder from output
- ansible.builtin.set_fact:
- data: "{{ result_data.json.Boot.BootOrder | default([]) }}"
-
- - name: Reversing the boot order
- ansible.builtin.set_fact:
- reverse_boot_order: "{{ data | reverse | list }}"
-
- name: Checking for LCStatus before running pre-requisite
ansible.builtin.uri:
<<: *uri_input
@@ -58,6 +41,12 @@
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
no_log: true
+ tags: molecule-idempotence-notest
+
+ - name: Wait for 60 seconds
+ ansible.builtin.wait_for:
+ timeout: 60
+ tags: molecule-idempotence-notest
- name: Pre-requisite - Making sure boot mode is uefi
check_mode: false
@@ -67,6 +56,24 @@
boot_source_override_mode: uefi
tags: molecule-idempotence-notest
+ - name: Fetching boot order from iDRAC
+ ansible.builtin.uri:
+ <<: *uri_input
+ url: "{{ boot_order_uri }}"
+ method: GET
+ register: result_data
+ check_mode: false
+ no_log: true
+
+  - name: Extracting BootOrder from output
+ ansible.builtin.set_fact:
+ data: "{{ result_data.json.Boot.BootOrder | default([]) }}"
+
+ - name: Reversing the boot order
+ ansible.builtin.set_fact:
+ reverse_boot_order: "{{ data | reverse | list }}"
+ tags: molecule-idempotence-notest
+
- name: Checking for LCStatus after running pre-requisite
ansible.builtin.uri:
<<: *uri_input
@@ -75,17 +82,22 @@
body: {}
register: lc_status_result
check_mode: false
- when: idrac_boot_out.changed # noqa: no-handler
until: lc_status_result.json.LCStatus == "Ready"
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
no_log: true
+ tags: molecule-idempotence-notest
+
+ - name: Wait for 60 seconds
+ ansible.builtin.wait_for:
+ timeout: 60
+ tags: molecule-idempotence-notest
- name: TC-115428 - Validate boot_order with uefi mode with graceful_restart
ansible.builtin.include_role:
name: "idrac_boot"
vars:
- boot_order: "{{ reverse_boot_order }}"
+ boot_order: "{{ reverse_boot_order | default(data) }}"
reset_type: graceful_restart
- name: Checking for LCStatus after performing operation
@@ -96,11 +108,11 @@
body: {}
register: lc_status_result
check_mode: false
- when: idrac_boot_out.changed # noqa: no-handler
until: lc_status_result.json.LCStatus == "Ready"
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
no_log: true
+ tags: molecule-idempotence-notest
- name: Asserting TC-115428 in check mode
ansible.builtin.assert:
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/converge.yml
index 076cdd755..a1e11acdd 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/converge.yml
@@ -40,6 +40,12 @@
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
no_log: true
+ tags: molecule-idempotence-notest
+
+ - name: Wait for 60 seconds
+ ansible.builtin.wait_for:
+ timeout: 60
+ tags: molecule-idempotence-notest
- name: Pre-requisite - Making sure boot_source_override_enabled is disabled
check_mode: false
@@ -57,11 +63,16 @@
body: {}
register: lc_status_result
check_mode: false
- when: idrac_boot_out.changed # noqa: no-handler
until: lc_status_result.json.LCStatus == "Ready"
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
no_log: true
+ tags: molecule-idempotence-notest
+
+ - name: Wait for 60 seconds
+ ansible.builtin.wait_for:
+ timeout: 60
+ tags: molecule-idempotence-notest
- name: TC-115433 - Validate boot_source_override_enabled as continuous
ansible.builtin.include_role:
@@ -83,6 +94,12 @@
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
no_log: true
+ tags: molecule-idempotence-notest
+
+ - name: Wait for 60 seconds
+ ansible.builtin.wait_for:
+ timeout: 60
+ tags: molecule-idempotence-notest
- name: Asserting TC-115433 in check mode
ansible.builtin.assert:
@@ -93,10 +110,4 @@
- name: Asserting TC-115433 in normal mode
ansible.builtin.assert:
that: idrac_boot_out.msg == "The boot settings job is triggered successfully."
- when: not ansible_check_mode and idrac_boot_out.changed
-
- - name: Asserting TC-115433 in idempotence mode
- ansible.builtin.assert:
- that:
- - idrac_boot_out.msg == "No changes found to be applied."
- when: not ansible_check_mode and not idrac_boot_out.changed
+ when: not ansible_check_mode
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/molecule.yml
index ed97d539c..608be28b1 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/molecule.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/molecule.yml
@@ -1 +1,7 @@
---
+scenario:
+ test_sequence:
+ - create
+ - check
+ - converge
+ - destroy
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml
index 50bb281b2..8fc9af6dc 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml
@@ -57,7 +57,6 @@
body: {}
register: lc_status_result
check_mode: false
- when: idrac_boot_out.changed # noqa: no-handler
until: lc_status_result.json.LCStatus == "Ready"
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
@@ -90,13 +89,8 @@
when: ansible_check_mode
tags: molecule-idempotence-notest
- - name: Asserting TC-115432 in normal mode
- ansible.builtin.assert:
- that: idrac_boot_out.msg == "The boot settings job is triggered successfully."
- when: not ansible_check_mode and idrac_boot_out.changed
-
- - name: Asserting TC-115432 in idempotence mode
+ - name: Asserting TC-115432 in normal mode or idempotence mode
ansible.builtin.assert:
that:
- - idrac_boot_out.msg == "No changes found to be applied."
+      - 'idrac_boot_out.msg == "The boot settings job is triggered successfully." or idrac_boot_out.msg == "No changes found to be applied."'
when: not ansible_check_mode and not idrac_boot_out.changed
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml
index 86b7b39ea..85b84477f 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml
@@ -58,7 +58,6 @@
body: {}
register: lc_status_result
check_mode: false
- when: idrac_boot_out.changed # noqa: no-handler
until: lc_status_result.json.LCStatus == "Ready"
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
@@ -78,11 +77,12 @@
method: GET
register: job_status_result
check_mode: false
- when: idrac_boot_out.changed # noqa: no-handler
- until: job_status_result.json.LCStatus == "Ready"
+ when: not ansible_check_mode # noqa: no-handler
+ until: job_status_result.json.JobState == "Completed"
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
no_log: true
+ tags: molecule-idempotence-notest
- name: Checking for LCStatus after performing operation
ansible.builtin.uri:
@@ -92,7 +92,6 @@
body: {}
register: lc_status_result
check_mode: false
- when: idrac_boot_out.changed # noqa: no-handler
until: lc_status_result.json.LCStatus == "Ready"
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
@@ -104,13 +103,8 @@
when: ansible_check_mode
tags: molecule-idempotence-notest
- - name: Asserting TC-115429 in normal mode
- ansible.builtin.assert:
- that: idrac_boot_out.msg == "The boot settings job is triggered successfully."
- when: not ansible_check_mode and idrac_boot_out.changed
-
- - name: Asserting TC-115429 in idempotence mode
+ - name: Asserting TC-115429 in normal mode or idempotence mode
ansible.builtin.assert:
that:
- - idrac_boot_out.msg == "No changes found to be applied."
+      - 'idrac_boot_out.msg == "The boot settings job is triggered successfully." or idrac_boot_out.msg == "No changes found to be applied."'
when: not ansible_check_mode and not idrac_boot_out.changed
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id/converge.yml
index a2b6ef922..3024c3678 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id/converge.yml
@@ -8,6 +8,7 @@
password: "{{ lookup('ansible.builtin.env', 'IDRAC_PASSWORD') }}"
validate_certs: false
system_uri: "https://{{ hostname }}:{{ https_port }}/redfish/v1/Systems"
+ job_status_uri: "https://{{ hostname }}:{{ https_port }}/redfish/v1/Managers/iDRAC.Embedded.1/Jobs"
lc_uri: "https://{{ hostname }}:{{ https_port }}/redfish/v1/Dell/Managers/iDRAC.Embedded.1/DellLCService/Actions/DellLCService.GetRemoteServicesAPIStatus"
retry_count: 60
delay_count: 30
@@ -54,6 +55,7 @@
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
no_log: true
+ tags: molecule-idempotence-notest
- name: Pre-requisite - Making sure boot mode is legacy
check_mode: false
@@ -71,7 +73,6 @@
body: {}
register: lc_status_result
check_mode: false
- when: idrac_boot_out.changed # noqa: no-handler
until: lc_status_result.json.LCStatus == "Ready"
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
@@ -91,11 +92,12 @@
method: GET
register: job_status_result
check_mode: false
- when: idrac_boot_out.changed # noqa: no-handler
- until: job_status_result.json.LCStatus == "Ready"
+ when: not ansible_check_mode # noqa: no-handler
+ until: job_status_result.json.JobState == "Completed"
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
no_log: true
+ tags: molecule-idempotence-notest
- name: Checking for LCStatus after performing operation
ansible.builtin.uri:
@@ -110,6 +112,7 @@
retries: "{{ retry_count }}"
delay: "{{ delay_count }}"
no_log: true
+ tags: molecule-idempotence-notest
- name: Asserting TC-115430 in check mode
ansible.builtin.assert:
@@ -119,7 +122,7 @@
- name: Asserting TC-115430 in normal mode
ansible.builtin.assert:
- that: idrac_boot_out.msg == "The boot settings job is triggered successfully."
+ that: idrac_boot_out.msg == "Successfully updated the boot settings."
when: not ansible_check_mode and idrac_boot_out.changed
- name: Asserting TC-115430 in idempotence mode
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CA/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CA/converge.yml
index 64e2a242f..3e7bb67c6 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CA/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CA/converge.yml
@@ -21,9 +21,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "import"
@@ -55,9 +55,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "export"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CSC/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CSC/converge.yml
index 2a8708f27..a78651306 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CSC/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CSC/converge.yml
@@ -22,9 +22,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "import"
@@ -57,9 +57,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "export"
@@ -98,9 +98,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "import"
@@ -133,9 +133,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "export"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CTC/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CTC/converge.yml
index cdf53ff08..5186af134 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CTC/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CTC/converge.yml
@@ -21,9 +21,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "import"
@@ -55,9 +55,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "export"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CustomCertificate/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CustomCertificate/converge.yml
index 0f07f68ca..119105b5a 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CustomCertificate/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/CustomCertificate/converge.yml
@@ -15,9 +15,9 @@
ansible.builtin.include_tasks:
file: ../__extract_firmware_version.yml
vars:
- idrac_ip: "{{ lookup('env', 'hostname') }}"
- idrac_user: "{{ lookup('env', 'username') }}"
- idrac_password: "{{ lookup('env', 'password') }}"
+ idrac_ip: "{{ lookup('env', 'IDRAC_IP') }}"
+ idrac_user: "{{ lookup('env', 'IDRAC_USER') }}"
+ idrac_password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
- name: Set expected firmware version
ansible.builtin.set_fact:
@@ -40,9 +40,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "import"
@@ -97,9 +97,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "export"
@@ -156,9 +156,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "import"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/HTTPS/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/HTTPS/converge.yml
index 28cdf16b8..610fdccd2 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/HTTPS/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/HTTPS/converge.yml
@@ -21,9 +21,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "import"
@@ -55,9 +55,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "export"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/SSLKEY/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/SSLKEY/converge.yml
index c90e4e53e..4937fb8e5 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/SSLKEY/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/SSLKEY/converge.yml
@@ -13,9 +13,9 @@
ansible.builtin.include_tasks:
file: ../__extract_firmware_version.yml
vars:
- idrac_ip: "{{ lookup('env', 'hostname') }}"
- idrac_user: "{{ lookup('env', 'username') }}"
- idrac_password: "{{ lookup('env', 'password') }}"
+ idrac_ip: "{{ lookup('env', 'IDRAC_IP') }}"
+ idrac_user: "{{ lookup('env', 'IDRAC_USER') }}"
+ idrac_password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
- name: Set expected firmware version
ansible.builtin.set_fact:
@@ -38,9 +38,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "import"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/converge.yml
index 56c26b4a6..021cec95f 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/converge.yml
@@ -39,9 +39,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "export"
@@ -62,9 +62,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "import"
@@ -84,9 +84,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "import"
@@ -106,9 +106,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "import"
@@ -128,8 +128,8 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "{{ lookup('env', 'invalid_password') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
@@ -153,8 +153,8 @@
name: dellemc.openmanage.idrac_certificate
vars:
hostname: "999.999.999.999"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
command: "reset"
certificate_type: "HTTPS"
@@ -173,9 +173,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
username: invalid
- password: "{{ lookup('env', 'password') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
command: "reset"
certificate_type: "HTTPS"
@@ -193,8 +193,8 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
password: invalid
validate_certs: false
command: "reset"
@@ -213,9 +213,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
command: "invalid"
certificate_type: "HTTPS"
@@ -233,9 +233,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
command: "export"
certificate_type: "HTTPS"
@@ -256,9 +256,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
command: "import"
certificate_type: "CSC"
@@ -279,9 +279,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
command: "generate_csr"
certificate_type: "HTTPS"
@@ -307,9 +307,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
command: "import"
certificate_type: "CUSTOMCERTIFICATE"
@@ -332,9 +332,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
command: "import"
certificate_type: "CUSTOMCERTIFICATE"
@@ -356,9 +356,9 @@
ansible.builtin.import_role:
name: dellemc.openmanage.idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
command: "import"
certificate_type: "HTTPS"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/generateCSR/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/generateCSR/converge.yml
index 9f57c7e84..ee37021b9 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/generateCSR/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/generateCSR/converge.yml
@@ -16,9 +16,9 @@
ansible.builtin.import_role:
name: idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "generate_csr"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/reset/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/reset/converge.yml
index 8a3e23ab5..1d78d3389 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/reset/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/reset/converge.yml
@@ -10,9 +10,9 @@
ansible.builtin.import_role:
name: idrac_certificate
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ca_path: "{{ ca_cert_path }}"
command: "reset"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/cleanup.yml b/ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/cleanup.yml
index 9ade81e90..cccd89d7b 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/cleanup.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/cleanup.yml
@@ -14,18 +14,18 @@
nfs_mount_path: "{{ lookup('env', 'nfs_mount_path') }}"
cifs_mount_path: "{{ lookup('env', 'cifs_mount_path') }}"
- nfs_url: "{{ lookup('env', 'NFS_URL') }}"
- cifs_url: "{{ lookup('env', 'CIFS_URL') }}"
- cifs_username: "{{ lookup('env', 'CIFS_USERNAME') }}"
- cifs_password: "{{ lookup('env', 'CIFS_PASSWORD') }}"
+ nfs_url: "{{ lookup('env', 'nfs_url') }}"
+ cifs_url: "{{ lookup('env', 'cifs_url') }}"
+ cifs_username: "{{ lookup('env', 'cifs_username') }}"
+ cifs_password: "{{ lookup('env', 'cifs_password') }}"
- https_url: "{{ lookup('env', 'HTTPS_URL') }}"
- https_username: "{{ lookup('env', 'HTTPS_USERNAME') }}"
- https_password: "{{ lookup('env', 'HTTPS_PASSWORD') }}"
+ https_url: "{{ lookup('env', 'https_url') }}"
+ https_username: "{{ lookup('env', 'https_username') }}"
+ https_password: "{{ lookup('env', 'https_password') }}"
- http_url: "{{ lookup('env', 'HTTP_URL') }}"
- http_username: "{{ lookup('env', 'HTTP_USERNAME') }}"
- http_password: "{{ lookup('env', 'HTTP_PASSWORD') }}"
+ http_url: "{{ lookup('env', 'http_url') }}"
+ http_username: "{{ lookup('env', 'http_username') }}"
+ http_password: "{{ lookup('env', 'http_password') }}"
tasks:
- name: Checking file exists in NFS mount localhost
ansible.builtin.stat:
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/converge.yml
index 8073a85bc..7b4e215c2 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/converge.yml
@@ -10,18 +10,18 @@
nfs_mount_path: "{{ lookup('env', 'nfs_mount_path') }}"
cifs_mount_path: "{{ lookup('env', 'nfs_mount_path') }}"
- nfs_url: "{{ lookup('env', 'NFS_URL') }}"
- cifs_url: "{{ lookup('env', 'CIFS_URL') }}"
- cifs_username: "{{ lookup('env', 'CIFS_USERNAME') }}"
- cifs_password: "{{ lookup('env', 'CIFS_PASSWORD') }}"
+ nfs_url: "{{ lookup('env', 'nfs_url') }}"
+ cifs_url: "{{ lookup('env', 'cifs_url') }}"
+ cifs_username: "{{ lookup('env', 'cifs_username') }}"
+ cifs_password: "{{ lookup('env', 'cifs_password') }}"
- https_url: "{{ lookup('env', 'HTTPS_URL') }}"
- https_username: "{{ lookup('env', 'HTTPS_USERNAME') }}"
- https_password: "{{ lookup('env', 'HTTPS_PASSWORD') }}"
+ https_url: "{{ lookup('env', 'https_url') }}"
+ https_username: "{{ lookup('env', 'https_username') }}"
+ https_password: "{{ lookup('env', 'https_password') }}"
- http_url: "{{ lookup('env', 'HTTP_URL') }}"
- http_username: "{{ lookup('env', 'HTTP_USERNAME') }}"
- http_password: "{{ lookup('env', 'HTTP_PASSWORD') }}"
+ http_url: "{{ lookup('env', 'http_url') }}"
+ http_username: "{{ lookup('env', 'http_username') }}"
+ http_password: "{{ lookup('env', 'http_password') }}"
gather_facts: false
tasks:
- name: Exporting SCP local path with all components
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/verify.yml b/ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/verify.yml
index 25206e2d3..dc1cd1815 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/verify.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_export_server_config_profile/molecule/default/verify.yml
@@ -14,18 +14,18 @@
nfs_mount_path: "{{ lookup('env', 'nfs_mount_path') }}"
cifs_mount_path: "{{ lookup('env', 'cifs_mount_path') }}"
- nfs_url: "{{ lookup('env', 'NFS_URL') }}"
- cifs_url: "{{ lookup('env', 'CIFS_URL') }}"
- cifs_username: "{{ lookup('env', 'CIFS_USERNAME') }}"
- cifs_password: "{{ lookup('env', 'CIFS_PASSWORD') }}"
+ nfs_url: "{{ lookup('env', 'nfs_url') }}"
+ cifs_url: "{{ lookup('env', 'cifs_url') }}"
+ cifs_username: "{{ lookup('env', 'cifs_username') }}"
+ cifs_password: "{{ lookup('env', 'cifs_password') }}"
- https_url: "{{ lookup('env', 'HTTPS_URL') }}"
- https_username: "{{ lookup('env', 'HTTPS_USERNAME') }}"
- https_password: "{{ lookup('env', 'HTTPS_PASSWORD') }}"
+ https_url: "{{ lookup('env', 'https_url') }}"
+ https_username: "{{ lookup('env', 'https_username') }}"
+ https_password: "{{ lookup('env', 'https_password') }}"
- http_url: "{{ lookup('env', 'HTTP_URL') }}"
- http_username: "{{ lookup('env', 'HTTP_USERNAME') }}"
- http_password: "{{ lookup('env', 'HTTP_PASSWORD') }}"
+ http_url: "{{ lookup('env', 'http_url') }}"
+ http_username: "{{ lookup('env', 'http_username') }}"
+ http_password: "{{ lookup('env', 'http_password') }}"
tasks:
- name: Checking exported file exists in Local path
ansible.builtin.stat:
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/cifs_share/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/cifs_share/converge.yml
index 161a35cf4..1ebd565bb 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/cifs_share/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/cifs_share/converge.yml
@@ -7,9 +7,9 @@
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_name: "{{ lookup('env', 'cifsshare') }}"
share_user: "{{ lookup('env', 'shareuser') }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/default/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/default/converge.yml
index bc30806f4..43a3ece9b 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/default/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/default/converge.yml
@@ -7,9 +7,9 @@
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_name: "{{ lookup('env', 'httpsshare') }}"
share_user: "{{ lookup('env', 'shareuser') }}"
@@ -31,9 +31,9 @@
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_name: "{{ lookup('env', 'httpsshare') }}"
share_user: "{{ lookup('env', 'shareuser') }}"
@@ -56,9 +56,9 @@
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_name: "{{ lookup('env', 'httpsshare') }}"
share_user: "{{ lookup('env', 'shareuser') }}"
@@ -80,9 +80,9 @@
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_name: "{{ lookup('env', 'httpsshare') }}"
share_user: "{{ lookup('env', 'shareuser') }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/ftp_share/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/ftp_share/converge.yml
index a94da723a..48ecbc93e 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/ftp_share/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/ftp_share/converge.yml
@@ -7,9 +7,9 @@
ansible.builtin.import_role:
name: idrac_firmware
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_name: "{{ lookup('env', 'ftpshare') }}"
share_user: "{{ lookup('env', 'shareuser') }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/http_share/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/http_share/converge.yml
index 82df756b5..c34cc7d32 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/http_share/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/http_share/converge.yml
@@ -7,9 +7,9 @@
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_name: "{{ lookup('env', 'httpsshare') }}"
share_user: "{{ lookup('env', 'shareuser') }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/https_share/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/https_share/converge.yml
index a94983cae..b1f7805ad 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/https_share/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/https_share/converge.yml
@@ -7,9 +7,9 @@
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_name: "{{ lookup('env', 'httpsshare') }}"
share_user: "{{ lookup('env', 'shareuser') }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/httpsproxy_share/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/httpsproxy_share/converge.yml
index b4bd4bdc1..d4b44b358 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/httpsproxy_share/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/httpsproxy_share/converge.yml
@@ -7,9 +7,9 @@
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_name: "{{ lookup('env', 'httpsproxy') }}"
share_user: "{{ lookup('env', 'shareuser') }}"
@@ -21,8 +21,8 @@
proxy_server: "{{ lookup('env', 'proxyserver') }}"
proxy_type: "HTTP"
proxy_port: 3128
- proxy_uname: "{{ lookup('env', 'proxyuname') }}"
- proxy_passwd: "{{ lookup('env', 'proxypass') }}"
+ proxy_uname: "{{ lookup('env', 'proxy_username') }}"
+ proxy_passwd: "{{ lookup('env', 'proxy_password') }}"
catalog_file_name: "Catalog.xml"
- name: "Verifying update firmware from repository on a HTTPS via parameter proxy share in check mode"
@@ -48,9 +48,9 @@
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
reboot: true
job_wait: true
@@ -81,9 +81,9 @@
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_name: "{{ lookup('env', 'httpsproxy') }}"
share_user: "{{ lookup('env', 'shareuser') }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/negative_scenarios/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/negative_scenarios/converge.yml
index 37b959272..98800e160 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/negative_scenarios/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/negative_scenarios/converge.yml
@@ -8,15 +8,13 @@
name: idrac_firmware
vars:
hostname: "invalidHostname"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
share_name: "{{ lookup('env', 'httpshare') }}"
catalog_file_name: "Catalog.xml"
reboot: true
job_wait: true
apply_update: true
- ignore_errors: true
- register: idrac_firmware_result
- name: "Verifying Updating firmware with an invalid hostname"
ansible.builtin.assert:
@@ -27,9 +25,9 @@
ansible.builtin.import_role:
name: idrac_firmware
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
username: "invalidUsername"
- password: "{{ lookup('env', 'password') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
share_name: "{{ lookup('env', 'httpshare') }}"
catalog_file_name: "Catalog.xml"
reboot: true
@@ -47,10 +45,10 @@
ansible.builtin.import_role:
name: idrac_firmware
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "invalidPassword"
- share_name: "{{ lookup('env', 'httpshare') }}"
+ share_name: "{{ lookup('env', 'httpsshare') }}"
catalog_file_name: "Catalog.xml"
reboot: true
job_wait: true
@@ -67,11 +65,11 @@
ansible.builtin.import_role:
name: idrac_firmware
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
- share_name: "{{ lookup('env', 'httpshare') }}"
- ca_path: "{{ lookup('env', 'capath') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ share_name: "{{ lookup('env', 'httpsshare') }}"
+ ca_path: "/invalid/ca/path.cert"
catalog_file_name: "Catalog.xml"
reboot: true
job_wait: true
@@ -82,15 +80,15 @@
- name: "Verifying Updating firmware with an invalid ca_path"
ansible.builtin.assert:
that:
- - idrac_firmware_out.msg == "Firmware update failed."
+ - "'Incorrect username or password, unreachable iDRAC IP or a failure in TLS/SSL handshake.' in idrac_firmware_out.msg"
- name: Updating firmware with catalog file without extension
ansible.builtin.import_role:
name: idrac_firmware
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
share_name: "{{ lookup('env', 'httpshare') }}"
reboot: true
job_wait: true
@@ -108,9 +106,9 @@
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_name: "{{ lookup('env', 'httpsshare') }}"
share_user: "invalidUser"
@@ -118,21 +116,24 @@
reboot: true
job_wait: true
apply_update: true
- catalog_file_name: "Catalog.xml"
+ catalog_file_name: "catalog.xml"
+ ignore_errors: true
+ register: idrac_firmware_result
- name: "Verifying update firmware from repository on a HTTPS Share with invalid share_user"
ansible.builtin.assert:
that:
- - idrac_firmware_out.msg == "Unable to complete the operation because the catalog name entered has either unsupported firmware packages
- or same version installed on the server."
+ # - idrac_firmware_out.msg == "Unable to complete the operation because the catalog name entered has either unsupported firmware packages
+ # or same version installed on the server."
+ - "'The specified user credentials necessary for downloading an update package were not correct.' in idrac_firmware_out.msg"
- name: Update firmware from repository on HTTPS Share with invalid share_password
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_name: "{{ lookup('env', 'httpsshare') }}"
share_user: "{{ lookup('env', 'shareuser') }}"
@@ -140,24 +141,26 @@
reboot: true
job_wait: true
apply_update: true
- catalog_file_name: "Catalog.xml"
+ catalog_file_name: "catalog.xml"
+ ignore_errors: true
register: idrac_firmware_result
- name: "Verifying update firmware from repository on a HTTPS Share with invalid share_password"
ansible.builtin.assert:
that:
- - idrac_firmware_out.msg == "Unable to complete the operation because the catalog name entered has either unsupported firmware packages
- or same version installed on the server."
+ # - idrac_firmware_out.msg == "Unable to complete the operation because the catalog name entered has either unsupported firmware packages
+ # or same version installed on the server."
+ - "'The specified user credentials necessary for downloading an update package were not correct.' in idrac_firmware_out.msg"
- name: Update firmware from repository on a HTTPS via parameter proxy Share with invalid proxy_uname
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
- share_name: "{{ lookup('env', 'httpsproxy') }}"
+ share_name: "{{ lookup('env', 'httpsshare') }}"
share_user: "{{ lookup('env', 'shareuser') }}"
share_password: "{{ lookup('env', 'sharepassword') }}"
reboot: true
@@ -166,10 +169,10 @@
proxy_support: "ParametersProxy"
proxy_server: "{{ lookup('env', 'proxyserver') }}"
proxy_type: "HTTP"
- proxy_port: 3128
- proxy_uname: "invalidUname"
- proxy_passwd: "{{ lookup('env', 'proxypass') }}"
- catalog_file_name: "Catalog.xml"
+ proxy_port: "{{ lookup('env', 'proxyport') }}"
+ proxy_uname: "{{ lookup('env', 'proxy_username') }}"
+ proxy_passwd: "{{ lookup('env', 'proxy_password') }}"
+ catalog_file_name: "catalog.xml"
- name: "Verifying update firmware from repository on a HTTPS via parameter proxy share with invalid proxy_uname"
ansible.builtin.assert:
@@ -181,11 +184,11 @@
ansible.builtin.import_role:
name: "idrac_firmware"
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
- share_name: "{{ lookup('env', 'httpsproxy') }}"
+ share_name: "{{ lookup('env', 'httpsshare') }}"
share_user: "{{ lookup('env', 'shareuser') }}"
share_password: "{{ lookup('env', 'sharepassword') }}"
reboot: true
@@ -194,13 +197,17 @@
proxy_support: "ParametersProxy"
proxy_server: "{{ lookup('env', 'proxyserver') }}"
proxy_type: "HTTP"
- proxy_port: 3128
- proxy_uname: "{{ lookup('env', 'proxyuname') }}"
+ proxy_port: "{{ lookup('env', 'proxyport') }}"
+ proxy_uname: "{{ lookup('env', 'proxy_username') }}"
proxy_passwd: "invalidPasswd"
- catalog_file_name: "Catalog.xml"
+ catalog_file_name: "catalog.xml"
+ ignore_errors: true
+ register: idrac_firmware_result
- name: "Verifying update firmware from repository on a HTTPS via parameter proxy share with invalid proxy_passwd"
ansible.builtin.assert:
that:
- - idrac_firmware_out.msg == "Unable to complete the operation because the catalog name entered has either unsupported firmware packages
- or same version installed on the server."
+ # - idrac_firmware_out.msg == "Unable to complete the operation because the catalog name entered has either unsupported firmware packages
+ # or same version installed on the server."
+ - "'Unable to transfer file' in idrac_firmware_out.msg"
+ - "'because of some unknown reasons.' in idrac_firmware_out.msg"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/nfs_share/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/nfs_share/converge.yml
index 89e55838c..d9d496f3a 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/nfs_share/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_firmware/molecule/nfs_share/converge.yml
@@ -7,9 +7,9 @@
ansible.builtin.import_role:
name: idrac_firmware
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
share_name: "{{ lookup('env', 'nfsshare') }}"
validate_certs: false
reboot: true
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/backplane/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/backplane/converge.yml
index adb6fcf5f..9aab189c2 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/backplane/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/backplane/converge.yml
@@ -4,9 +4,9 @@
connection: local
gather_facts: true
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- PCIeSSDBackPlane
@@ -23,22 +23,44 @@
- 404
- -1
idrac_gather_facts_uri_return_content: true
- diff_data: {}
- exclude_keys: []
tasks:
+ - name: Collecting PCIeSSDBackPlane information from API
+ ansible.builtin.uri:
+ url: "https://{{ hostname }}/redfish/v1/Chassis/Oem/Dell/DellPCIeSSDBackPlanes"
+ validate_certs: "{{ validate_certs }}"
+ ca_path: "{{ ca_path | default(omit) }}"
+ method: "{{ idrac_gather_facts_uri_method }}"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ headers: "{{ idrac_gather_facts_uri_headers }}"
+ body_format: "{{ idrac_gather_facts_uri_body_format }}"
+ status_code: "{{ idrac_gather_facts_uri_status_code }}"
+ return_content: "{{ idrac_gather_facts_uri_return_content }}"
+ register: backplane_result_url
+ no_log: true
+
+ - name: Validate whether PCIeSSDBackPlane information exists
+ ansible.builtin.assert:
+ that:
+ - "{{ backplane_result_url.json.Members | length > 0 }}"
+ fail_msg: "PCIeSSDBackPlane information does not exist"
+
+ - name: Set PCIeSSDBackPlane facts
+ ansible.builtin.set_fact:
+ api_response: "{{ backplane_result_url.json.Members | ansible.utils.remove_keys(target=['@odata.context', '@odata.id', '@odata.type']) }}"
+
- name: Gather Facts for the PCIeSSDBackPlane component
ansible.builtin.include_role:
name: "idrac_gather_facts"
- - name: Assert backplane dict for length
+ - name: Check whether output differs
+ ansible.builtin.set_fact:
+ result_diff: "{{ backplane | symmetric_difference(api_response) }}"
+
+ - name: Assert the differences in List
ansible.builtin.assert:
+ fail_msg: "The response from the role does not match"
+ success_msg: "The response from the role matches"
that:
- - "{{ backplane | length > 0 }}"
-
- - name: Call assertion
- ansible.builtin.include_tasks: ../../tests/asserts/backplane_assert.yml
- with_items: "{{ backplane }}"
- loop_control:
- loop_var: backplane_data
- when: backplane | length > 0
+ - "{{ (result_diff | length) == 0 }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/bios/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/bios/converge.yml
index 491d49d42..6d8bf0b66 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/bios/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/bios/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- BIOS
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/controller/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/controller/converge.yml
index e7059f6a7..8e6ade139 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/controller/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/controller/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- Controller
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/default/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/default/converge.yml
index 3d3f3ed1d..58d622a57 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/default/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/default/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- System
@@ -68,10 +68,8 @@
- name: Response filter
ansible.builtin.set_fact:
api_response:
- "{{ system_result.json | json_query(jquery) | combine(os_result.json.Attributes) |
+ "{{ system_result.json.Oem.Dell.DellSystem | combine(os_result.json.Attributes) |
ansible.utils.remove_keys(target=['@odata.context', '@odata.id', '@odata.type']) }}"
- vars:
- jquery: "Oem.Dell.DellSystem"
- name: Set the keys diff
ansible.builtin.set_fact:
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/enclosure/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/enclosure/converge.yml
index f83d84ac7..c43f9390a 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/enclosure/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/enclosure/converge.yml
@@ -4,9 +4,9 @@
connection: local
gather_facts: true
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- Enclosure
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/enclosureemm/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/enclosureemm/converge.yml
index 9bddda5a7..e240180b1 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/enclosureemm/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/enclosureemm/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- EnclosureEMM
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/fan/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/fan/converge.yml
index bdd47a873..7b17ce7dd 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/fan/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/fan/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- Fan
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/firmware/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/firmware/converge.yml
index 88047ce5c..a1fa26117 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/firmware/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/firmware/converge.yml
@@ -4,9 +4,9 @@
connection: local
gather_facts: true
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- Firmware
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/hostnic/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/hostnic/converge.yml
index 1ab1f4911..c7dff509f 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/hostnic/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/hostnic/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- HostNIC
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/idrac/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/idrac/converge.yml
index 2b8788274..19e8bf7f5 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/idrac/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/idrac/converge.yml
@@ -4,9 +4,9 @@
connection: local
gather_facts: true
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- IDRAC
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/license/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/license/converge.yml
index b1fe0419b..8e5609448 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/license/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/license/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- License
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/memory/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/memory/converge.yml
index 5a3909481..b21c6ecdc 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/memory/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/memory/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- Memory
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/negative/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/negative/converge.yml
index b191098a8..6062027e6 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/negative/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/negative/converge.yml
@@ -8,8 +8,8 @@
name: idrac_gather_facts
vars:
hostname: "randomHostname"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
ignore_errors: true
ignore_unreachable: true
@@ -24,9 +24,9 @@
ansible.builtin.import_role:
name: idrac_gather_facts
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
username: "randomUsername"
- password: "{{ lookup('env', 'password') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target: ["Bios"]
ignore_errors: true
@@ -42,8 +42,8 @@
ansible.builtin.import_role:
name: idrac_gather_facts
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "randomPassword"
validate_certs: false
target: ["Bios"]
@@ -56,30 +56,35 @@
that:
- idrac_gather_facts_connection.status == -1
+ - name: Set computer system id
+ ansible.builtin.set_fact:
+ system_id: "randomSystemID"
+
- name: To check for wrong system id
ansible.builtin.import_role:
name: idrac_gather_facts
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
- computer_system_id: "randomSystemID"
+ computer_system_id: "{{ system_id }}"
ignore_errors: true
register: idrac_gather_facts_error
- name: Asserting after performing operation with invalid system id
ansible.builtin.assert:
that:
- - "{{ computer_system_id not in system_ids }}"
+          - "{{ computer_system_id is not in system_ids }}"
+ when: system_ids is defined
- name: To check for wrong manager id
ansible.builtin.import_role:
name: idrac_gather_facts
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
manager_id: "randomManagerID"
target: ["Firmware"]
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/nic/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/nic/converge.yml
index 70d00f200..c12fee4af 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/nic/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/nic/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- NIC
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/passensor/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/passensor/converge.yml
index 93de081d3..c4d86a62b 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/passensor/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/passensor/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- PresenceAndStatusSensor
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/pciedevice/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/pciedevice/converge.yml
index b87459d13..924a8fd39 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/pciedevice/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/pciedevice/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- PCIeDevice
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/physicaldisk/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/physicaldisk/converge.yml
index a5b66a7f9..ef2414535 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/physicaldisk/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/physicaldisk/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- PhysicalDisk
@@ -27,18 +27,60 @@
api_system: "/redfish/v1/Systems/System.Embedded.1"
tasks:
+ - name: Get Storage information.
+ ansible.builtin.uri: &uri_params
+ url: "https://{{ hostname }}{{ api_system }}/Storage"
+ validate_certs: "{{ validate_certs }}"
+ ca_path: "{{ ca_path | default(omit) }}"
+ method: "{{ idrac_gather_facts_uri_method }}"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ headers: "{{ idrac_gather_facts_uri_headers }}"
+ body_format: "{{ idrac_gather_facts_uri_body_format }}"
+ status_code: "{{ idrac_gather_facts_uri_status_code }}"
+ return_content: "{{ idrac_gather_facts_uri_return_content }}"
+ register: idrac_gather_facts_controllers_result
+
+ - name: Get all storage controller ids.
+ ansible.builtin.set_fact:
+ storage_ids_list: "{{ idrac_gather_facts_controllers_result.json.Members | map('dict2items') | flatten | map(attribute='value') }}"
+
+ - name: Get physical disk information using API.
+ ansible.builtin.uri:
+ url: "https://{{ hostname }}{{ item }}?$expand=*($levels=1)"
+ <<: *uri_params
+ loop: "{{ storage_ids_list }}"
+ no_log: true
+ register: idrac_gather_facts_disk_result
+
+    - name: Count the number of drives in idrac_gather_facts_disk_result.results
+      ansible.builtin.set_fact:
+        drives_count: "{{ idrac_gather_facts_disk_result.results | selectattr('json.Drives', 'defined') | map(attribute='json.Drives') | flatten | length }}"
+
+ - name: Validate whether physical disk information exists
+ ansible.builtin.assert:
+ that:
+ - "{{ drives_count | int > 0 }}"
+ fail_msg: "Physical Disk information does not exist"
+
+ - name: Set Physical Disk facts
+ ansible.builtin.set_fact:
+ api_response:
+          "{{ idrac_gather_facts_disk_result.results | selectattr('json', 'defined') | map(attribute='json') | selectattr('Drives', 'defined') |
+ map(attribute='Drives') | flatten | ansible.utils.remove_keys(target=['@odata.context', '@odata.id', '@odata.type',
+ 'Actions', 'Assembly', 'Links', 'DellDriveSMARTAttributes', 'DellNVMeSMARTAttributes', 'Operations@odata.count']) }}"
+
- name: Gather Facts for the Physical Disk component
ansible.builtin.include_role:
name: "idrac_gather_facts"
- - name: Assert physical disk dict for length
+ - name: Check whether output differs
+ ansible.builtin.set_fact:
+ result_diff: "{{ physical_disk | symmetric_difference(api_response) }}"
+
+ - name: Assert the differences in List
ansible.builtin.assert:
+ fail_msg: "The response from the role does not match"
+ success_msg: "The response from the role matches"
that:
- - "{{ physical_disk | length > 0 }}"
-
- - name: Call assertion
- ansible.builtin.include_tasks: ../../tests/asserts/physicaldisk_assert.yml
- with_items: "{{ physical_disk }}"
- loop_control:
- loop_var: pd_data
- when: physical_disk | length > 0
+ - "{{ (result_diff | length) == 0 }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/powersupply/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/powersupply/converge.yml
index 1fdb5a278..0f648b875 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/powersupply/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/powersupply/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- PowerSupply
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/secureboot/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/secureboot/converge.yml
index e4585165e..88fdc4ea6 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/secureboot/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/secureboot/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- SecureBoot
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsbattery/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsbattery/converge.yml
index feee7473e..ab19c7cd2 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsbattery/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsbattery/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- Sensors_Battery
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsintrusion/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsintrusion/converge.yml
index 274319cff..4fca00ba1 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsintrusion/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsintrusion/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- Sensors_Intrusion
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsvoltage/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsvoltage/converge.yml
index 16435ef11..378022256 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsvoltage/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/sensorsvoltage/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- Sensors_Voltage
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/systemmetrics/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/systemmetrics/converge.yml
index acd31a108..418aa2b78 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/systemmetrics/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/systemmetrics/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- SystemMetrics
@@ -88,13 +88,9 @@
register: response_power_supply
no_log: true
- - name: Set query
- ansible.builtin.set_fact:
- jq: "[*].Id"
-
- name: Get Power Supply Metrics ids
ansible.builtin.set_fact:
- psu_ids: "{{ power_result.json.Members | json_query(jq) }}"
+ psu_ids: "{{ power_result.json.Members | map(attribute='Id') | list }}"
- name: Call assertion for Power metrics
ansible.builtin.include_tasks: ../../tests/asserts/psmetrics_assert.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/virtualdisk/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/virtualdisk/converge.yml
index 27fd2b829..5c3486634 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/virtualdisk/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/virtualdisk/converge.yml
@@ -3,9 +3,9 @@
hosts: all
gather_facts: false
vars:
- hostname: "{{ lookup('env', 'hostname') }}"
- username: "{{ lookup('env', 'username') }}"
- password: "{{ lookup('env', 'password') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- VirtualDisk
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/backplane_assert.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/backplane_assert.yml
deleted file mode 100644
index a4562f9e0..000000000
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/backplane_assert.yml
+++ /dev/null
@@ -1,39 +0,0 @@
-- name: Get PCIeSSDBackPlanes information.
- ansible.builtin.uri:
- url: "https://{{ hostname }}/redfish/v1/Chassis/Oem/Dell/DellPCIeSSDBackPlanes/{{ backplane_data.Id }}"
- validate_certs: "{{ validate_certs }}"
- ca_path: "{{ ca_path | default(omit) }}"
- method: "{{ idrac_gather_facts_uri_method }}"
- user: "{{ username }}"
- password: "{{ password }}"
- headers: "{{ idrac_gather_facts_uri_headers }}"
- body_format: "{{ idrac_gather_facts_uri_body_format }}"
- status_code: "{{ idrac_gather_facts_uri_status_code }}"
- return_content: "{{ idrac_gather_facts_uri_return_content }}"
- register: backplane_result
- no_log: true
-
-- name: Set backplane facts
- ansible.builtin.set_fact:
- api_response: "{{ backplane_result.json | ansible.utils.remove_keys(target=['@odata.context', '@odata.id', '@odata.type']) }}"
-
-- name: Set the keys diff
- ansible.builtin.set_fact:
- diff_keys: "{{ backplane_data.keys() | list | symmetric_difference((api_response.keys() | list)) }}"
-
-- name: Set a Diff of dict
- ansible.builtin.set_fact:
- diff_data: "{{ diff_data | combine({item: backplane_data[item]}) }}"
- loop: "{{ backplane_data.keys() }}"
- when:
- - diff_keys | length == 0
- - backplane_data[item] != api_response[item]
- - item not in exclude_keys
-
-- name: Assert the difference in Keys
- ansible.builtin.assert:
- that:
- - "{{ (diff_keys | length) == 0 }}"
- - "{{ (diff_data | length) == 0 }}"
- fail_msg: "The response from the role does not match | Diff Keys : {{ diff_keys }} Diff Data : {{ diff_data }}"
- success_msg: "The response from the role matches | Diff Keys : {{ diff_keys }} Diff Data : {{ diff_data }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/physicaldisk_assert.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/physicaldisk_assert.yml
deleted file mode 100644
index 76ec6624f..000000000
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/physicaldisk_assert.yml
+++ /dev/null
@@ -1,45 +0,0 @@
----
-- name: Get controller id
- ansible.builtin.set_fact:
- ctrl_id: "{{ pd_data.Id | split(':') | last }}"
-
-- name: Get Storage information.
- ansible.builtin.uri:
- url: "https://{{ hostname }}{{ api_system }}/Storage/{{ ctrl_id }}/Drives/{{ pd_data.Id }}"
- validate_certs: "{{ validate_certs }}"
- ca_path: "{{ ca_path | default(omit) }}"
- method: "{{ idrac_gather_facts_uri_method }}"
- user: "{{ username }}"
- password: "{{ password }}"
- headers: "{{ idrac_gather_facts_uri_headers }}"
- body_format: "{{ idrac_gather_facts_uri_body_format }}"
- status_code: "{{ idrac_gather_facts_uri_status_code }}"
- return_content: "{{ idrac_gather_facts_uri_return_content }}"
- no_log: true
- register: disk_result
-
-- name: Filter Physical Disk data
- ansible.builtin.set_fact:
- api_response: "{{ disk_result.json | ansible.utils.remove_keys(target=['@odata.context', '@odata.id', '@odata.type',
- 'Actions', 'Assembly', 'Links', 'DellDriveSMARTAttributes', 'DellNVMeSMARTAttributes', 'Operations@odata.count']) }}"
-
-- name: Set the keys diff
- ansible.builtin.set_fact:
- diff_keys: "{{ pd_data.keys() | list | symmetric_difference((api_response.keys() | list)) }}"
-
-- name: Set a Diff of dict
- ansible.builtin.set_fact:
- diff_data: "{{ diff_data | combine({item: pd_data[item]}) }}"
- loop: "{{ pd_data.keys() }}"
- when:
- - diff_keys | length == 0
- - pd_data[item] != api_response[item]
- - item not in exclude_keys
-
-- name: Assert the difference in Keys
- ansible.builtin.assert:
- that:
- - "{{ (diff_keys | length) == 0 }}"
- - "{{ (diff_data | length) == 0 }}"
- fail_msg: "The response from the role does not match | Diff Keys : {{ diff_keys }} Diff Data : {{ diff_data }}"
- success_msg: "The response from the role matches | Diff Keys : {{ diff_keys }} Diff Data : {{ diff_data }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/converge.yml
index 29ff66275..df421143f 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/converge.yml
@@ -1,21 +1,32 @@
---
- name: Converge
hosts: all
+ vars:
+ share_input: &share_input
+ share_name: "{{ lookup('env', 'cifs_url') }}"
+ share_user: "{{ lookup('env', 'cifs_username') }}"
+ share_password: "{{ lookup('env', 'cifs_password') }}"
+ scp_file: "{{ lookup('env', 'cifs_filename') }}"
gather_facts: false
tasks:
+ - name: "Pre-requisites"
+ ansible.builtin.include_tasks:
+ file: "../resources/tests/export.yml"
+ vars:
+ _share_parameters:
+ <<: *share_input
+ tags: molecule-idempotence-notest
+
- name: "Importing SCP from CIFS with ALL components"
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_parameters:
- share_name: "{{ lookup('env', 'CIFS_URL') }}"
- share_user: "{{ lookup('env', 'CIFS_USERNAME') }}"
- share_password: "{{ lookup('env', 'CIFS_PASSWORD') }}"
- scp_file: "{{ lookup('env', 'cifs_filename') }}"
+ <<: *share_input
- name: Verifying Import SCP from CIFS with ALL components
ansible.builtin.assert:
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/molecule.yml
index e69de29bb..fc17009ba 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/molecule.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/molecule.yml
@@ -0,0 +1,5 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/tests/cleanup.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/prepare.yml
deleted file mode 100644
index 5fadc24b5..000000000
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/cifs_share/prepare.yml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-- name: Cleanup
- hosts: all
- gather_facts: false
- tasks:
- - name: Cleanup config
- ansible.builtin.include_tasks: ../resources/tests/prepare.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/default/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/default/converge.yml
index c0ae89edf..46f902d78 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/default/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/default/converge.yml
@@ -7,14 +7,14 @@
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target: ['IDRAC']
share_parameters:
- share_user: "{{ lookup('env', 'USERNAME') }}"
- share_password: "{{ lookup('env', 'PASSWORD') }}"
+ share_user: "{{ lookup('env', 'http_username') }}"
+ share_password: "{{ lookup('env', 'http_password') }}"
scp_file: "{{ lookup('env', 'http_filename') }}"
ignore_errors: true
register: idrac_import_server_config_profile_status
@@ -28,15 +28,15 @@
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target: ['IDRAC']
share_parameters:
- share_name: "{{ lookup('env', 'HTTP_URL') }}"
- share_user: "{{ lookup('env', 'USERNAME') }}"
- share_password: "{{ lookup('env', 'PASSWORD') }}"
+ share_name: "{{ lookup('env', 'http_url') }}"
+ share_user: "{{ lookup('env', 'http_username') }}"
+ share_password: "{{ lookup('env', 'http_password') }}"
ignore_errors: true
register: idrac_import_server_config_profile_status
@@ -50,14 +50,14 @@
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_parameters:
- share_name: "{{ lookup('env', 'CIFS_URL') }}"
- share_user: "{{ lookup('env', 'CIFS_USERNAME') }}"
- share_password: "{{ lookup('env', 'CIFS_PASSWORD') }}"
+ share_name: "{{ lookup('env', 'cifs_url') }}"
+ share_user: "{{ lookup('env', 'cifs_username') }}"
+ share_password: "{{ lookup('env', 'cifs_password') }}"
scp_file: "invalid_file.xml"
ignore_errors: true
register: idrac_import_server_config_profile_status
@@ -76,14 +76,14 @@
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_parameters:
share_name: "192.168.0.1:/cifsshare"
- share_user: "{{ lookup('env', 'CIFS_USERNAME') }}"
- share_password: "{{ lookup('env', 'CIFS_PASSWORD') }}"
+ share_user: "{{ lookup('env', 'cifs_username') }}"
+ share_password: "{{ lookup('env', 'cifs_password') }}"
scp_file: "{{ lookup('env', 'cifs_filename') }}"
ignore_errors: true
register: idrac_import_server_config_profile_status
@@ -99,13 +99,13 @@
name: "idrac_import_server_config_profile"
vars:
hostname: "randomHostname"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_parameters:
- share_name: "{{ lookup('env', 'CIFS_URL') }}"
- share_user: "{{ lookup('env', 'CIFS_USERNAME') }}"
- share_password: "{{ lookup('env', 'CIFS_PASSWORD') }}"
+ share_name: "{{ lookup('env', 'cifs_url') }}"
+ share_user: "{{ lookup('env', 'cifs_username') }}"
+ share_password: "{{ lookup('env', 'cifs_password') }}"
scp_file: "{{ lookup('env', 'cifs_filename') }}"
ignore_errors: true
ignore_unreachable: true
@@ -121,14 +121,14 @@
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
username: "WrongUsername123"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_parameters:
- share_name: "{{ lookup('env', 'CIFS_URL') }}"
- share_user: "{{ lookup('env', 'CIFS_USERNAME') }}"
- share_password: "{{ lookup('env', 'CIFS_PASSWORD') }}"
+ share_name: "{{ lookup('env', 'cifs_url') }}"
+ share_user: "{{ lookup('env', 'cifs_username') }}"
+ share_password: "{{ lookup('env', 'cifs_password') }}"
scp_file: "{{ lookup('env', 'cifs_filename') }}"
ignore_errors: true
ignore_unreachable: true
@@ -143,14 +143,14 @@
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "WrongPassword@123"
validate_certs: false
share_parameters:
- share_name: "{{ lookup('env', 'CIFS_URL') }}"
- share_user: "{{ lookup('env', 'CIFS_USERNAME') }}"
- share_password: "{{ lookup('env', 'CIFS_PASSWORD') }}"
+ share_name: "{{ lookup('env', 'cifs_url') }}"
+ share_user: "{{ lookup('env', 'cifs_username') }}"
+ share_password: "{{ lookup('env', 'cifs_password') }}"
scp_file: "{{ lookup('env', 'cifs_filename') }}"
ignore_errors: true
ignore_unreachable: true
@@ -165,15 +165,15 @@
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target: ['idrac']
share_parameters:
- share_name: "{{ lookup('env', 'HTTP_URL') }}"
- share_user: "{{ lookup('env', 'HTTP_USERNAME') }}"
- share_password: "{{ lookup('env', 'HTTP_PASSWORD') }}"
+ share_name: "{{ lookup('env', 'http_url') }}"
+ share_user: "{{ lookup('env', 'http_username') }}"
+ share_password: "{{ lookup('env', 'http_password') }}"
scp_file: "{{ lookup('env', 'http_filename') }}"
ignore_errors: true
register: idrac_import_server_config_profile_status
@@ -188,15 +188,15 @@
# ansible.builtin.import_role:
# name: "idrac_import_server_config_profile"
# vars:
- # hostname: "{{ lookup('env', 'HOSTNAME') }}"
- # username: "{{ lookup('env', 'USERNAME') }}"
- # password: "{{ lookup('env', 'PASSWORD') }}"
+ # hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ # username: "{{ lookup('env', 'IDRAC_USER') }}"
+ # password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
# validate_certs: false
# target: ['IDRAC']
# share_parameters:
- # share_name: "{{ lookup('env', 'HTTP_URL') }}"
+ # share_name: "{{ lookup('env', 'http_url') }}"
# share_user: "WrongUsername123"
- # share_password: "{{ lookup('env', 'HTTP_PASSWORD') }}"
+ # share_password: "{{ lookup('env', 'http_password') }}"
# scp_file: "{{ lookup('env', 'http_filename') }}"
# ignore_errors: true
# register: idrac_import_server_config_profile_status
@@ -217,14 +217,14 @@
# ansible.builtin.import_role:
# name: "idrac_import_server_config_profile"
# vars:
- # hostname: "{{ lookup('env', 'HOSTNAME') }}"
- # username: "{{ lookup('env', 'USERNAME') }}"
- # password: "{{ lookup('env', 'PASSWORD') }}"
+ # hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ # username: "{{ lookup('env', 'IDRAC_USER') }}"
+ # password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
# validate_certs: false
# target: ['IDRAC']
# share_parameters:
- # share_name: "{{ lookup('env', 'HTTP_URL') }}"
- # share_user: "{{ lookup('env', 'USERNAME') }}"
+ # share_name: "{{ lookup('env', 'http_url') }}"
+ # share_user: "{{ lookup('env', 'http_username') }}"
# share_password: "WrongPassword@123"
# scp_file: "{{ lookup('env', 'http_filename') }}"
# ignore_errors: true
@@ -241,14 +241,14 @@
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target: ['IDRAC']
share_parameters:
share_name: None
- share_user: "{{ lookup('env', 'USERNAME') }}"
+ share_user: "{{ lookup('env', 'http_username') }}"
share_password: "WrongPassword@123"
scp_file: "{{ lookup('env', 'http_filename') }}"
ignore_errors: true
@@ -263,15 +263,15 @@
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target: ['ALL']
share_parameters:
- share_name: "{{ lookup('env', 'HTTP_URL') }}"
- share_user: "{{ lookup('env', 'HTTP_USERNAME') }}"
- share_password: "{{ lookup('env', 'HTTP_PASSWORD') }}"
+ share_name: "{{ lookup('env', 'http_url') }}"
+ share_user: "{{ lookup('env', 'http_username') }}"
+ share_password: "{{ lookup('env', 'http_password') }}"
scp_file: "{{ lookup('env', 'http_filename') }}"
proxy_support: true
ignore_errors: true
@@ -287,21 +287,21 @@
# ansible.builtin.import_role:
# name: "idrac_import_server_config_profile"
# vars:
- # hostname: "{{ lookup('env', 'HOSTNAME') }}"
- # username: "{{ lookup('env', 'USERNAME') }}"
- # password: "{{ lookup('env', 'PASSWORD') }}"
+ # hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ # username: "{{ lookup('env', 'IDRAC_USER') }}"
+ # password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
# validate_certs: false
# target: ['ALL']
# share_parameters:
- # share_name: "{{ lookup('env', 'HTTP_URL') }}"
- # share_user: "{{ lookup('env', 'HTTP_USERNAME') }}"
- # share_password: "{{ lookup('env', 'HTTP_PASSWORD') }}"
+ # share_name: "{{ lookup('env', 'http_url') }}"
+ # share_user: "{{ lookup('env', 'http_username') }}"
+ # share_password: "{{ lookup('env', 'http_password') }}"
# scp_file: "{{ lookup('env', 'http_filename') }}"
# proxy_support: true
# proxy_type: http
# proxy_server: "randomProxyServer"
- # proxy_port: "{{ lookup('env', 'PROXY_PORT') }}"
- # proxy_password: "{{ lookup('env', 'PROXY_PASSWORD') }}"
+ # proxy_port: "{{ lookup('env', 'proxy_port') }}"
+ # proxy_password: "{{ lookup('env', 'proxy_password') }}"
# ignore_errors: true
# register: idrac_import_server_config_profile_status
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/converge.yml
index f9761ebc1..6ab64e12b 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/converge.yml
@@ -1,23 +1,33 @@
---
- name: Converge
hosts: all
+ vars:
+ share_input: &share_input
+ share_name: "{{ lookup('env', 'http_url') }}"
+ share_user: "{{ lookup('env', 'http_username') }}"
+ share_password: "{{ lookup('env', 'http_password') }}"
+ scp_file: "{{ lookup('env', 'http_filename') }}"
gather_facts: false
tasks:
+ - name: "Pre-requisites"
+ ansible.builtin.include_tasks:
+ file: "../resources/tests/export.yml"
+ vars:
+ _share_parameters:
+ <<: *share_input
+ tags: molecule-idempotence-notest
- name: "Importing SCP from HTTPS"
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target: "RAID"
share_parameters:
- share_name: "{{ lookup('env', 'HTTP_URL') }}"
- share_user: "{{ lookup('env', 'HTTP_USERNAME') }}"
- share_password: "{{ lookup('env', 'HTTP_PASSWORD') }}"
- scp_file: "{{ lookup('env', 'http_filename') }}"
+ <<: *share_input
when: not ansible_check_mode
- name: Verifying Import SCP from HTTP with in normal mode
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/molecule.yml
index e69de29bb..fc17009ba 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/molecule.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/molecule.yml
@@ -0,0 +1,5 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/tests/cleanup.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/prepare.yml
deleted file mode 100644
index 5fadc24b5..000000000
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share/prepare.yml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-- name: Cleanup
- hosts: all
- gather_facts: false
- tasks:
- - name: Cleanup config
- ansible.builtin.include_tasks: ../resources/tests/prepare.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/converge.yml
index a0348544a..71d891f10 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/converge.yml
@@ -1,27 +1,38 @@
---
- name: Converge
hosts: all
+ vars:
+ share_input: &share_input
+ share_name: "{{ lookup('env', 'http_url') }}"
+ share_user: "{{ lookup('env', 'http_username') }}"
+ share_password: "{{ lookup('env', 'http_password') }}"
+ scp_file: "{{ lookup('env', 'http_filename') }}"
+ proxy_support: true
+ proxy_type: http
+ proxy_server: "{{ lookup('env', 'proxy_server') }}"
+ proxy_username: "{{ lookup('env', 'proxy_username') }}"
+ proxy_port: "{{ lookup('env', 'proxy_port') }}"
+ proxy_password: "{{ lookup('env', 'proxy_password') }}"
gather_facts: false
tasks:
+ - name: "Pre-requisites"
+ ansible.builtin.include_tasks:
+ file: "../resources/tests/export.yml"
+ vars:
+ _share_parameters:
+ <<: *share_input
+ tags: molecule-idempotence-notest
+
- name: "Importing SCP from HTTP with proxy parameters"
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_parameters:
- share_name: "{{ lookup('env', 'HTTP_URL') }}"
- share_user: "{{ lookup('env', 'HTTP_USERNAME') }}"
- share_password: "{{ lookup('env', 'HTTP_PASSWORD') }}"
- scp_file: "{{ lookup('env', 'http_filename') }}"
- proxy_support: true
- proxy_type: http
- proxy_server: "{{ lookup('env', 'PROXY_SERVER') }}"
- proxy_port: "{{ lookup('env', 'PROXY_PORT') }}"
- proxy_username: "{{ lookup('env', 'PROXY_USER') }}"
- proxy_password: "{{ lookup('env', 'PROXY_PASSWORD') }}"
+ <<: *share_input
when: not ansible_check_mode
- name: Verifying Import SCP from HTTP with proxy parameters in normal mode
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/molecule.yml
index e69de29bb..fc17009ba 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/molecule.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/molecule.yml
@@ -0,0 +1,5 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/tests/cleanup.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/prepare.yml
deleted file mode 100644
index 5fadc24b5..000000000
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/prepare.yml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-- name: Cleanup
- hosts: all
- gather_facts: false
- tasks:
- - name: Cleanup config
- ansible.builtin.include_tasks: ../resources/tests/prepare.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/converge.yml
index b96730d75..f49f28438 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/converge.yml
@@ -2,26 +2,37 @@
- name: Converge
hosts: all
gather_facts: false
+ vars:
+ share_input: &share_input
+ share_name: "{{ lookup('env', 'http_url') }}"
+ share_user: "{{ lookup('env', 'http_username') }}"
+ share_password: "{{ lookup('env', 'http_password') }}"
+ scp_file: "{{ lookup('env', 'http_filename') }}"
+ proxy_support: true
+ proxy_server: "{{ lookup('env', 'proxy_server') }}"
+ proxy_username: "{{ lookup('env', 'proxy_username') }}"
+ proxy_port: "{{ lookup('env', 'proxy_port') }}"
+ proxy_password: "{{ lookup('env', 'proxy_password') }}"
+ ignore_certificate_warning: showerror
tasks:
+ - name: "Pre-requisites"
+ ansible.builtin.include_tasks:
+ file: "../resources/tests/export.yml"
+ vars:
+ _share_parameters:
+ <<: *share_input
+ tags: molecule-idempotence-notest
+
- name: "Importing SCP from HTTPS with ignore_certificate_warning as showerror"
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_parameters:
- share_name: "{{ lookup('env', 'HTTP_URL') }}"
- share_user: "{{ lookup('env', 'HTTP_USERNAME') }}"
- share_password: "{{ lookup('env', 'HTTP_PASSWORD') }}"
- scp_file: "{{ lookup('env', 'http_filename') }}"
- proxy_support: true
- proxy_server: "{{ lookup('env', 'PROXY_SERVER') }}"
- proxy_port: "{{ lookup('env', 'PROXY_PORT') }}"
- proxy_username: "{{ lookup('env', 'PROXY_USER') }}"
- proxy_password: "{{ lookup('env', 'PROXY_PASSWORD') }}"
- ignore_certificate_warning: showerror
+ <<: *share_input
when: not ansible_check_mode
- name: Verifying Import SCP from HTTP with ignore_certificate_warning as showerror in normal mode
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/prepare.yml
deleted file mode 100644
index 5fadc24b5..000000000
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/prepare.yml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-- name: Cleanup
- hosts: all
- gather_facts: false
- tasks:
- - name: Cleanup config
- ansible.builtin.include_tasks: ../resources/tests/prepare.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/converge.yml
index 7981a536c..cb61a0947 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/converge.yml
@@ -1,21 +1,32 @@
---
- name: Converge
hosts: all
+ vars:
+ share_input: &share_input
+ share_name: "{{ lookup('env', 'https_url') }}"
+ share_user: "{{ lookup('env', 'http_username') }}"
+ share_password: "{{ lookup('env', 'http_password') }}"
+ scp_file: "{{ lookup('env', 'https_filename') }}"
gather_facts: false
tasks:
+ - name: "Pre-requisites"
+ ansible.builtin.include_tasks:
+ file: "../resources/tests/export.yml"
+ vars:
+ _share_parameters:
+ <<: *share_input
+ tags: molecule-idempotence-notest
+
- name: "Importing SCP from HTTPS"
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_parameters:
- share_name: "{{ lookup('env', 'HTTPS_URL') }}"
- share_user: "{{ lookup('env', 'HTTPS_USERNAME') }}"
- share_password: "{{ lookup('env', 'HTTPS_PASSWORD') }}"
- scp_file: "{{ lookup('env', 'https_filename') }}"
+ <<: *share_input
when: not ansible_check_mode
- name: Verifying Import SCP from HTTPS in normal mode
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/molecule.yml
index e69de29bb..fc17009ba 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/molecule.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/molecule.yml
@@ -0,0 +1,5 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/tests/cleanup.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/prepare.yml
deleted file mode 100644
index 5fadc24b5..000000000
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share/prepare.yml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-- name: Cleanup
- hosts: all
- gather_facts: false
- tasks:
- - name: Cleanup config
- ansible.builtin.include_tasks: ../resources/tests/prepare.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/converge.yml
index 013505814..2f10bff1f 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/converge.yml
@@ -1,27 +1,38 @@
---
- name: Converge
hosts: all
+ vars:
+ share_input: &share_input
+ share_name: "{{ lookup('env', 'https_url') }}"
+ share_user: "{{ lookup('env', 'https_username') }}"
+ share_password: "{{ lookup('env', 'https_password') }}"
+ scp_file: "{{ lookup('env', 'https_filename') }}"
+ proxy_support: true
+ proxy_type: http
+ proxy_server: "{{ lookup('env', 'proxy_server') }}"
+ proxy_username: "{{ lookup('env', 'proxy_username') }}"
+ proxy_port: "{{ lookup('env', 'proxy_port') }}"
+ proxy_password: "{{ lookup('env', 'proxy_password') }}"
gather_facts: false
tasks:
+ - name: "Pre-requisites"
+ ansible.builtin.include_tasks:
+ file: "../resources/tests/export.yml"
+ vars:
+ _share_parameters:
+ <<: *share_input
+ tags: molecule-idempotence-notest
+
- name: "Importing SCP from HTTPS with proxy parameters"
ansible.builtin.import_role:
name: "idrac_import_server_config_profile"
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_parameters:
- share_name: "{{ lookup('env', 'HTTPS_URL') }}"
- share_user: "{{ lookup('env', 'HTTPS_USERNAME') }}"
- share_password: "{{ lookup('env', 'HTTPS_PASSWORD') }}"
- scp_file: "{{ lookup('env', 'https_filename') }}"
- proxy_support: true
- proxy_type: http
- proxy_server: "{{ lookup('env', 'PROXY_SERVER') }}"
- proxy_username: "{{ lookup('env', 'PROXY_USER') }}"
- proxy_port: "{{ lookup('env', 'PROXY_PORT') }}"
- proxy_password: "{{ lookup('env', 'PROXY_PASSWORD') }}"
+ <<: *share_input
when: not ansible_check_mode
- name: Verifying Import SCP from HTTPS with proxy parameters in normal mode
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/molecule.yml
index e69de29bb..fc17009ba 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/molecule.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/molecule.yml
@@ -0,0 +1,5 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/tests/cleanup.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/prepare.yml
deleted file mode 100644
index 5fadc24b5..000000000
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/prepare.yml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-- name: Cleanup
- hosts: all
- gather_facts: false
- tasks:
- - name: Cleanup config
- ansible.builtin.include_tasks: ../resources/tests/prepare.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_buffer_json/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_buffer_json/converge.yml
index 3fbeec584..bbc8161cf 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_buffer_json/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_buffer_json/converge.yml
@@ -7,9 +7,9 @@
ansible.builtin.import_role:
name: idrac_import_server_config_profile
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target:
- IDRAC
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_buffer_xml/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_buffer_xml/converge.yml
index bd956dc0c..3f7ef166a 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_buffer_xml/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_buffer_xml/converge.yml
@@ -7,9 +7,9 @@
ansible.builtin.import_role:
name: idrac_import_server_config_profile
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
target: ['IDRAC']
import_buffer: '<SystemConfiguration><Component FQDD="iDRAC.Embedded.1"><Attribute Name="Time.1#Timezone">CST6CDT</Attribute>
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/converge.yml
index 860e63b52..4e130f66a 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/converge.yml
@@ -1,23 +1,24 @@
---
- name: Converge
hosts: all
+ vars:
+ _target: ['NIC', 'IDRAC']
+ share_input: &share_input
+ share_name: "{{ lookup('env', 'nfs_url') }}"
+ scp_file: "{{ lookup('env', 'nfs_filename') }}"
gather_facts: false
tasks:
-
- name: "Importing SCP from NFS with multiple components"
ansible.builtin.import_role:
name: idrac_import_server_config_profile
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
- target:
- - 'NIC'
- - 'IDRAC'
+ target: "{{ _target }}"
share_parameters:
- share_name: "{{ lookup('env', 'NFS_URL') }}"
- scp_file: "{{ lookup('env', 'nfs_filename') }}"
+ <<: *share_input
shutdown_type: 'Forced'
end_host_power_state: 'On'
when: not ansible_check_mode
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/molecule.yml
index e69de29bb..fc17009ba 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/molecule.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/molecule.yml
@@ -0,0 +1,5 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/tests/cleanup.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/prepare.yml
deleted file mode 100644
index 5fadc24b5..000000000
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/import_multiple_target/prepare.yml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-- name: Cleanup
- hosts: all
- gather_facts: false
- tasks:
- - name: Cleanup config
- ansible.builtin.include_tasks: ../resources/tests/prepare.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/converge.yml
index bb839b38b..14d3de043 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/converge.yml
@@ -1,20 +1,30 @@
---
- name: Converge
hosts: all
+ vars:
+ share_input: &share_input
+ share_name: "{{ lookup('env', 'nfs_url') }}"
+ scp_file: "{{ lookup('env', 'nfs_filename') }}"
gather_facts: false
tasks:
+ - name: "Pre-requisites"
+ ansible.builtin.include_tasks:
+ file: "../resources/tests/export.yml"
+ vars:
+ _share_parameters:
+ <<: *share_input
+ tags: molecule-idempotence-notest
- name: "Importing SCP from NFS"
ansible.builtin.import_role:
name: idrac_import_server_config_profile
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
share_parameters:
- share_name: "{{ lookup('env', 'NFS_URL') }}"
- scp_file: "{{ lookup('env', 'nfs_filename') }}"
+ <<: *share_input
shutdown_type: 'Forced'
end_host_power_state: 'On'
when: not ansible_check_mode
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/molecule.yml
index e69de29bb..fc17009ba 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/molecule.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/molecule.yml
@@ -0,0 +1,5 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/tests/cleanup.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/prepare.yml
deleted file mode 100644
index 5fadc24b5..000000000
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/nfs_share/prepare.yml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-- name: Cleanup
- hosts: all
- gather_facts: false
- tasks:
- - name: Cleanup config
- ansible.builtin.include_tasks: ../resources/tests/prepare.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/cleanup.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/cleanup.yml
new file mode 100644
index 000000000..1cb9a04de
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/cleanup.yml
@@ -0,0 +1,64 @@
+---
+- name: Cleanup
+ hosts: all
+ gather_facts: false
+ tasks:
+ - name: Setfact for target
+ ansible.builtin.set_fact:
+ idrac_import_server_config_profile_target: "{{ idrac_import_server_config_profile_target | default(['ALL']) }}"
+
+ - name: Cleanup when target is ['ALL'] or ['IDRAC']
+ ansible.builtin.import_role:
+ name: idrac_attributes
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ idrac_attributes:
+ Time.1.Timezone: "UTC" # "UTC"
+ when: "'ALL' in idrac_import_server_config_profile_target or 'IDRAC' in idrac_import_server_config_profile_target"
+
+ - name: Cleanup when target is ['BIOS']
+ dellemc.openmanage.idrac_bios:
+ idrac_ip: "{{ lookup('env', 'IDRAC_IP') }}"
+ idrac_user: "{{ lookup('env', 'IDRAC_USER') }}"
+ idrac_password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ attributes:
+ NumLock: "On" # "On"
+ when: "'BIOS' in idrac_import_server_config_profile_target"
+
+ - name: Calling nic_helper.yml
+ ansible.builtin.include_tasks:
+ file: "nic_helper.yml"
+ when: "'NIC' in idrac_import_server_config_profile_target"
+
+ - name: Cleanup when target is ['NIC']
+ dellemc.openmanage.idrac_network_attributes:
+ idrac_ip: "{{ lookup('env', 'IDRAC_IP') }}"
+ idrac_user: "{{ lookup('env', 'IDRAC_USER') }}"
+ idrac_password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ network_adapter_id: "{{ inp_network_adapter_id }}"
+ network_device_function_id: "{{ inp_network_device_function_id }}"
+ apply_time: Immediate
+ oem_network_attributes:
+ BannerMessageTimeout: "5" # "5"
+ when: "'NIC' in idrac_import_server_config_profile_target"
+
+ - name: Calling raid_helper.yml
+ ansible.builtin.include_tasks:
+ file: "raid_helper.yml"
+ when: "'RAID' in idrac_import_server_config_profile_target"
+
+ - name: Cleanup when target is ['RAID']
+ dellemc.openmanage.idrac_redfish_storage_controller:
+ baseuri: "{{ lookup('env', 'IDRAC_IP') }}:{{ https_port }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ controller_id: "{{ inp_controller_id }}"
+ attributes:
+ SupportedInitializationTypes: "Fast"
+ when: "'RAID' in idrac_import_server_config_profile_target"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/export.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/export.yml
new file mode 100644
index 000000000..f0b8d9024
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/export.yml
@@ -0,0 +1,72 @@
+---
+- name: Setfact for target
+ ansible.builtin.set_fact:
+ idrac_import_server_config_profile_target: "{{ idrac_import_server_config_profile_target | default(['ALL']) }}"
+
+- name: Performing export of server config profile
+ ansible.builtin.import_role:
+ name: idrac_export_server_config_profile
+ vars:
+ idrac_ip: "{{ lookup('env', 'IDRAC_IP') }}"
+ idrac_user: "{{ lookup('env', 'IDRAC_USER') }}"
+ idrac_password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ target: "{{ idrac_import_server_config_profile_target }}"
+ share_parameters: "{{ _share_parameters }}"
+ when: _share_parameters is defined
+
+- name: Making changes when target is ['ALL'] or ['IDRAC']
+ ansible.builtin.import_role:
+ name: idrac_attributes
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ idrac_attributes:
+ Time.1.Timezone: "CST6CDT"
+ when: "'ALL' in idrac_import_server_config_profile_target or 'IDRAC' in idrac_import_server_config_profile_target"
+
+- name: Making changes when target is ['BIOS']
+ dellemc.openmanage.idrac_bios:
+ idrac_ip: "{{ lookup('env', 'IDRAC_IP') }}"
+ idrac_user: "{{ lookup('env', 'IDRAC_USER') }}"
+ idrac_password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ attributes:
+ NumLock: "Off"
+ when: "'BIOS' in idrac_import_server_config_profile_target"
+
+- name: Calling nic_helper.yml
+ ansible.builtin.include_tasks:
+ file: "nic_helper.yml"
+ when: "'NIC' in idrac_import_server_config_profile_target"
+
+- name: Making changes when target is ['NIC']
+ dellemc.openmanage.idrac_network_attributes:
+ idrac_ip: "{{ lookup('env', 'IDRAC_IP') }}"
+ idrac_user: "{{ lookup('env', 'IDRAC_USER') }}"
+ idrac_password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ network_adapter_id: "{{ inp_network_adapter_id }}"
+ network_device_function_id: "{{ inp_network_device_function_id }}"
+ apply_time: Immediate
+ oem_network_attributes:
+ BannerMessageTimeout: "4"
+ when: "'NIC' in idrac_import_server_config_profile_target"
+
+- name: Calling raid_helper.yml
+ ansible.builtin.include_tasks:
+ file: "raid_helper.yml"
+ when: "'RAID' in idrac_import_server_config_profile_target"
+
+- name: Making changes when target is ['RAID']
+ dellemc.openmanage.idrac_redfish_storage_controller:
+ baseuri: "{{ lookup('env', 'IDRAC_IP') }}:{{ https_port }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ controller_id: "{{ inp_controller_id }}"
+ attributes:
+ SupportedInitializationTypes: "Slow"
+ when: "'RAID' in idrac_import_server_config_profile_target"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/nic_helper.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/nic_helper.yml
new file mode 100644
index 000000000..a8211d70a
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/nic_helper.yml
@@ -0,0 +1,39 @@
+- name: Helper file for NIC
+ block:
+ - name: Fetch network adapter id
+ ansible.builtin.uri: &network_uri
+ url: "https://{{ idrac_ip | ansible.utils.ipwrap }}:{{ idrac_port }}/redfish/v1/Systems/System.Embedded.1/NetworkAdapters"
+ user: "{{ idrac_user }}"
+ password: "{{ idrac_password }}"
+ method: GET
+ body: {}
+ validate_certs: false
+ body_format: json
+ return_content: true
+ status_code: 200
+ headers: 'Accept=application/json'
+ force_basic_auth: true
+ register: fetched_network_adapter_id
+
+ - name: Extracting network adapter id
+ ansible.builtin.set_fact:
+ network_adapter_id: "{{ fetched_network_adapter_id.content | from_json | json_query('Members[0]') }}"
+
+ - name: Setting network adapter id
+ ansible.builtin.set_fact:
+ inp_network_adapter_id: "{{ '@odata.id' | extract(network_adapter_id) | split('/') | last }}"
+
+ - name: Fetch network device function id
+ ansible.builtin.uri:
+ <<: *network_uri
+ url: "https://{{ idrac_ip | ansible.utils.ipwrap }}:{{ idrac_port }}/redfish/v1/Systems\
+ /System.Embedded.1/NetworkAdapters/{{ inp_network_adapter_id }}/NetworkDeviceFunctions"
+ register: fetched_network_device_function_id
+
+ - name: Extracting network device function id
+ ansible.builtin.set_fact:
+ network_device_function_id: "{{ fetched_network_device_function_id.content | from_json | json_query('Members[0]') }}"
+
+ - name: Setting network device function id
+ ansible.builtin.set_fact:
+ inp_network_device_function_id: "{{ '@odata.id' | extract(network_device_function_id) | split('/') | last }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/prepare.yml
index aa9fd74c5..b73bedd59 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/prepare.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/prepare.yml
@@ -3,9 +3,9 @@
ansible.builtin.import_role:
name: idrac_import_server_config_profile
vars:
- hostname: "{{ lookup('env', 'HOSTNAME') }}"
- username: "{{ lookup('env', 'USERNAME') }}"
- password: "{{ lookup('env', 'PASSWORD') }}"
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
import_buffer: "{ \"SystemConfiguration\": {\"Components\": [
{ \"FQDD\": \"iDRAC.Embedded.1\",\"Attributes\": [{ \"Name\": \"Time.1#Timezone\",
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/raid_helper.yml b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/raid_helper.yml
new file mode 100644
index 000000000..42d0ff214
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_import_server_config_profile/molecule/resources/tests/raid_helper.yml
@@ -0,0 +1,24 @@
+- name: Helper file for RAID
+ block:
+ - name: Fetch storage controller id
+ ansible.builtin.uri: &network_uri
+ url: "https://{{ idrac_ip | ansible.utils.ipwrap }}:{{ idrac_port }}/redfish/v1/Systems/System.Embedded.1/Storage"
+ user: "{{ idrac_user }}"
+ password: "{{ idrac_password }}"
+ method: GET
+ body: {}
+ validate_certs: false
+ body_format: json
+ return_content: true
+ status_code: 200
+ headers: 'Accept=application/json'
+ force_basic_auth: true
+ register: fetched_controller_id
+
+ - name: Extracting controller id
+ ansible.builtin.set_fact:
+ controller_id: "{{ fetched_controller_id.content | from_json | json_query('Members[0]') }}"
+
+ - name: Setting controller id
+ ansible.builtin.set_fact:
+ inp_controller_id: "{{ '@odata.id' | extract(controller_id) | split('/') | last }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/clear_job_queue/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/clear_job_queue/converge.yml
index 6bf6af48b..cc9f08965 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/clear_job_queue/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/clear_job_queue/converge.yml
@@ -7,7 +7,7 @@
ansible.builtin.set_fact:
input: &input
hostname: "{{ lookup('env', 'IDRAC_IP') }}"
- username: "{{ lookup('env', 'IDRAC_USERNAME') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
no_log: true
@@ -83,7 +83,7 @@
- name: Creating a job which exports SCP local path with all components
dellemc.openmanage.idrac_server_config_profile:
idrac_ip: "{{ lookup('env', 'IDRAC_IP') }}"
- idrac_user: "{{ lookup('env', 'IDRAC_USERNAME') }}"
+ idrac_user: "{{ lookup('env', 'IDRAC_USER') }}"
idrac_password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
scp_components:
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/default/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/default/converge.yml
index 1d14502f8..2c16c6319 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/default/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/default/converge.yml
@@ -6,7 +6,7 @@
- name: Creating job to export SCP local path with all components
dellemc.openmanage.idrac_server_config_profile:
idrac_ip: "{{ lookup('env', 'IDRAC_IP') }}"
- idrac_user: "{{ lookup('env', 'IDRAC_USERNAME') }}"
+ idrac_user: "{{ lookup('env', 'IDRAC_USER') }}"
idrac_password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
scp_components:
@@ -24,7 +24,7 @@
name: "idrac_job_queue"
vars:
hostname: "{{ lookup('env', 'IDRAC_IP') }}"
- username: "{{ lookup('env', 'IDRAC_USERNAME') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
job_id: JID_12345678
@@ -41,7 +41,7 @@
name: idrac_job_queue
vars:
hostname: "invalidHostname"
- username: "{{ lookup('env', 'IDRAC_USERNAME') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
clear_job_queue: true
validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
@@ -78,7 +78,7 @@
name: idrac_job_queue
vars:
hostname: "{{ lookup('env', 'IDRAC_IP') }}"
- username: "{{ lookup('env', 'IDRAC_USERNAME') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "invalidPassword"
clear_job_queue: true
validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/delete_job/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/delete_job/converge.yml
index ecf859bf7..809dda61f 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/delete_job/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/delete_job/converge.yml
@@ -7,7 +7,7 @@
ansible.builtin.set_fact:
input: &input
hostname: "{{ lookup('env', 'IDRAC_IP') }}"
- username: "{{ lookup('env', 'IDRAC_USERNAME') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
no_log: true
@@ -37,7 +37,7 @@
- name: Creating job to export SCP local path with all components
dellemc.openmanage.idrac_server_config_profile:
idrac_ip: "{{ lookup('env', 'IDRAC_IP') }}"
- idrac_user: "{{ lookup('env', 'IDRAC_USERNAME') }}"
+ idrac_user: "{{ lookup('env', 'IDRAC_USER') }}"
idrac_password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
scp_components:
@@ -55,7 +55,7 @@
url: "https://{{ lookup('env', 'IDRAC_IP') }}/redfish/v1/Managers/iDRAC.Embedded.1/Jobs"
validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
method: "GET"
- user: "{{ lookup('env', 'IDRAC_USERNAME') }}"
+ user: "{{ lookup('env', 'IDRAC_USER') }}"
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
headers:
Accept: "application/json"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/README.md b/ansible_collections/dellemc/openmanage/roles/idrac_user/README.md
new file mode 100644
index 000000000..0404dfe71
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/README.md
@@ -0,0 +1,347 @@
+# idrac_user
+
+Role to manage local users for iDRAC.
+
+## Requirements
+
+### Development
+
+Requirements to develop and contribute to the role.
+
+```
+ansible
+docker
+molecule
+python
+```
+
+### Production
+
+Requirements to use the role.
+
+```
+ansible
+python
+```
+
+### Ansible collections
+
+Collections required to use the role
+
+```
+dellemc.openmanage
+```
+
+## Role Variables
+
+<table>
+<thead>
+ <tr>
+ <th>Name</th>
+ <th>Required</th>
+ <th>Default Value</th>
+ <th>Choices</th>
+ <th>Type</th>
+ <th>Description</th>
+ </tr>
+</thead>
+<tbody>
+ <tr>
+ <td>hostname</td>
+ <td>true</td>
+ <td></td>
+ <td></td>
+ <td>str</td>
+ <td>- IPv4, IPv6 Address or hostname of the iDRAC.</td>
+ </tr>
+ <tr>
+ <td>username</td>
+ <td>true</td>
+ <td></td>
+ <td></td>
+ <td>str</td>
+ <td>- iDRAC username with 'Administrator' privilege.</td>
+ </tr>
+ <tr>
+ <td>password</td>
+ <td>true</td>
+ <td></td>
+ <td></td>
+ <td>str</td>
+ <td>- iDRAC user password.</td>
+ </tr>
+ <tr>
+ <td>https_port</td>
+ <td>false</td>
+ <td>443</td>
+ <td></td>
+ <td>int</td>
+ <td>- iDRAC port.</td>
+ </tr>
+ <tr>
+ <td>validate_certs</td>
+ <td>false</td>
+ <td>true</td>
+ <td></td>
+ <td>bool</td>
+ <td>- If C(false), the SSL certificates will not be validated.<br>- Configure C(false) only on personally controlled sites where self-signed certificates are used.</td>
+ </tr>
+ <tr>
+ <td>ca_path</td>
+ <td>false</td>
+ <td></td>
+ <td></td>
+ <td>path</td>
+ <td>- The Privacy Enhanced Mail (PEM) file that contains a CA certificate to be used for the validation.</td>
+ </tr>
+ <tr>
+ <td>https_timeout</td>
+ <td>false</td>
+ <td>30</td>
+ <td></td>
+ <td>int</td>
+ <td>- The HTTPS socket level timeout in seconds.</td>
+ </tr>
+ <tr>
+ <td>state</td>
+ <td>false</td>
+ <td>present</td>
+ <td>[present, absent]</td>
+ <td>str</td>
+ <td>- Select C(present) to create or modify a user account.</br>- Select C(absent) to remove a user account.</td>
+ </tr>
+ <tr>
+ <td>user_name</td>
+ <td>true</td>
+ <td></td>
+ <td></td>
+ <td>str</td>
+ <td>- Provide username of the iDRAC user account that is created, deleted, or modified.</td>
+ </tr>
+ <tr>
+ <td>user_password</td>
+ <td>false</td>
+ <td></td>
+ <td></td>
+ <td>str</td>
+ <td>- Provide password for the iDRAC user account that is created, or modified. The password can be changed when the user account is modified.</br>- To ensure security, the I(user_password) must be at least eight characters long and must contain
+ lowercase and upper-case characters, numbers, and special characters.
+</td>
+ </tr>
+ <tr>
+ <td>new_user_name</td>
+ <td>false</td>
+ <td></td>
+ <td></td>
+ <td>str</td>
+ <td>- Provide the I(user_name) for the iDRAC user account that is modified.</td>
+ </tr>
+ <tr>
+ <td>privilege</td>
+ <td>false</td>
+ <td></td>
+ <td>["Administrator","ReadOnly","Operator","None"]</td>
+ <td>str</td>
+ <td>- Following are the role-based privileges.</br>- A user with C(Administrator) privilege can log in to iDRAC, and then configure iDRAC, configure users, clear logs, control and configure system, access virtual console, access virtual media, test alerts, and execute debug commands.
+ </br>- A user with C(Operator) privilege can log in to iDRAC, and then configure iDRAC, control and configure system, access virtual console, access virtual media, and execute debug commands.</br>- A user with C(ReadOnly) privilege can only log in to iDRAC.</br>- A user with C(None), no privileges assigned.</br>- Will be ignored, if I(custom_privilege) parameter is provided.</td>
+ </tr>
+
+ <tr>
+ <td>custom_privilege</td>
+ <td>false</td>
+ <td></td>
+ <td></td>
+ <td>int</td>
+ <td>- Provide the custom role-based authority privileges allowed for the user.</br>- To create a custom privilege, add up the privilege decimal values as defined below.</br>Login - 1</br>Configure - 2</br>Configure Users - 4</br>Logs - 8</br>System Control - 16</br>Access Virtual Console - 32</br>Access Virtual Media - 64</br>System Operations - 128</br>Debug - 256</br>- The value has to be in the range 0-511.</td>
+ </tr>
+ <tr>
+ <td>ipmi_lan_privilege</td>
+ <td>false</td>
+ <td></td>
+ <td>["Administrator","Operator","User","No Access"]</td>
+ <td>str</td>
+ <td>- The Intelligent Platform Management Interface LAN privilege level assigned to the user.</td>
+ </tr>
+ <tr>
+ <td>ipmi_serial_privilege</td>
+ <td>false</td>
+ <td></td>
+ <td>["Administrator","Operator","User","No Access"]</td>
+ <td>str</td>
+ <td>- The Intelligent Platform Management Interface Serial Port privilege level assigned to the user.</br>- This option is only applicable for rack and tower servers.</td>
+ </tr>
+ <tr>
+ <td>enable</td>
+ <td>false</td>
+ <td></td>
+ <td></td>
+ <td>bool</td>
+ <td>Provide the option to enable or disable a user from logging in to iDRAC.</td>
+ </tr>
+ <tr>
+ <td>sol_enable</td>
+ <td>false</td>
+ <td></td>
+ <td></td>
+ <td>bool</td>
+ <td>Enables Serial Over Lan (SOL) for an iDRAC user.</td>
+ </tr>
+ <tr>
+ <td>protocol_enable</td>
+ <td>false</td>
+ <td></td>
+ <td></td>
+ <td>bool</td>
+ <td>Enables SNMPv3 protocol for the iDRAC user.</td>
+ </tr>
+ <tr>
+ <td>authentication_protocol</td>
+ <td>false</td>
+ <td></td>
+ <td>["None","SHA","MD5"]</td>
+ <td>str</td>
+ <td>- This option allows to configure one of the following authentication protocol types to authenticate the iDRAC user.</br>- Secure Hash Algorithm C(SHA).</br>- Message Digest 5 C(MD5).</br>- If C(None) is selected, then the authentication protocol is not configured.</td>
+ </tr>
+ <tr>
+ <td>privacy_protocol</td>
+ <td>false</td>
+ <td></td>
+ <td>["None","DES","AES"]</td>
+ <td>str</td>
+ <td>- This option allows to configure one of the following privacy encryption protocols for the iDRAC user.</br>- Data Encryption Standard C(DES).</br>- Advanced Encryption Standard C(AES).</br>- If C(None) is selected, then the privacy protocol is not configured.</td>
+ </tr>
+</tbody>
+</table>
+## Fact variables
+
+<table>
+<thead>
+ <tr>
+ <th>Name</th>
+ <th>Sample</th>
+ <th>Description</th>
+ </tr>
+</thead>
+ <tbody>
+ <tr>
+ <td>idrac_user_out</td>
+ <td>{"changed": true,
+ "failed": false,
+ "msg": "Successfully created user account details."
+}</td>
+<td>Output of the iDRAC user role</td>
+</tr>
+<tr>
+ <td>idrac_user_account</td>
+ <td>
+ {"changed": true,
+ "failed": false,
+ {
+ "AccountTypes": [
+ "Redfish",
+ "SNMP",
+ "OEM",
+ "HostConsole",
+ "ManagerConsole",
+ "IPMI",
+ "KVMIP",
+ "VirtualMedia",
+ "WebUI"
+ ],
+ "Description": "User Account",
+ "Enabled": true,
+ "Id": "2",
+ "Locked": false,
+ "Name": "User Account",
+ "OEMAccountTypes": [
+ "IPMI",
+ "SOL",
+ "WSMAN",
+ "UI",
+ "RACADM"
+ ],
+ "Oem": {
+ "Dell": {
+ "SNMPv3PassphraseEnabled": "Disabled"
+ }
+ },
+ "Password": null,
+ "PasswordChangeRequired": false,
+ "PasswordExpiration": null,
+ "RoleId": "Administrator",
+ "SNMP": {
+ "AuthenticationKey": null,
+ "AuthenticationKeySet": true,
+ "AuthenticationProtocol": "HMAC_MD5",
+ "EncryptionKey": null,
+ "EncryptionKeySet": true,
+ "EncryptionProtocol": "CBC_DES"
+ },
+ "StrictAccountTypes": false,
+ "UserName": "root"
+}
+}</td>
+<td>Details of the iDRAC user account that is created or modified.</td>
+</tr>
+</tbody>
+</table>
+
+## Examples
+
+---
+
+```yml
+- name: Configure a new iDRAC user
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "192.1.2.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ user_name: user_name
+ user_password: user_password
+ privilege: Administrator
+ ipmi_lan_privilege: Administrator
+ ipmi_serial_privilege: Administrator
+ enable: true
+ sol_enable: true
+ protocol_enable: true
+ authentication_protocol: SHA
+ privacy_protocol: AES
+```
+
+```yml
+- name: Modify username and password for the existing iDRAC user
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "192.1.2.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ user_name: user_name
+ new_user_name: new_user_name
+ user_password: user_password
+```
+
+```yml
+- name: Delete existing iDRAC user account
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "192.1.2.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ user_name: user_name
+```
+## Author Information
+
+---
+
+Dell Technologies <br>
+Kritika Bhateja (Kritika.Bhateja@Dell.com) 2024
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/defaults/main.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/defaults/main.yml
new file mode 100644
index 000000000..e73b71e4f
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/defaults/main.yml
@@ -0,0 +1,6 @@
+---
+# defaults file for idrac_user
+validate_certs: true
+https_timeout: 30
+https_port: 443
+state: present
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/handlers/main.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/handlers/main.yml
new file mode 100644
index 000000000..8ba5fa018
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/handlers/main.yml
@@ -0,0 +1,2 @@
+---
+# handlers file for idrac_user
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/meta/argument_specs.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/meta/argument_specs.yml
new file mode 100644
index 000000000..8075581bc
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/meta/argument_specs.yml
@@ -0,0 +1,170 @@
+---
+argument_specs:
+ main:
+ version_added: "9.0.0"
+ short_description: Role to manage local users for iDRAC.
+ description:
+ - Role to manage local users for iDRACs
+ (iDRAC8 and iDRAC9 only) for Dell PowerEdge servers.
+ options:
+ hostname:
+ required: true
+ type: str
+ description: IPv4, IPv6 Address or hostname of the iDRAC.
+ username:
+ type: str
+ description: iDRAC username with 'Administrator' privilege.
+ password:
+ type: str
+ description: iDRAC user password.
+ https_port:
+ type: int
+ description: iDRAC port.
+ default: 443
+ validate_certs:
+ description:
+ - If C(false), the SSL certificates will not be validated.
+ - Configure C(false) only on personally controlled sites
+ where self-signed certificates are used.
+ type: bool
+ default: true
+ ca_path:
+ description:
+ - The Privacy Enhanced Mail (PEM) file that contains a CA
+ certificate to be used for the validation.
+ type: path
+ https_timeout:
+ description: The HTTPS socket level timeout in seconds.
+ type: int
+ default: 30
+ state:
+ description:
+ - Select C(present) to create or modify a user account.
+ - Select C(absent) to remove a user account.
+ type: str
+ default: present
+ choices:
+ [
+ "present",
+ "absent"
+ ]
+ user_name:
+ description: Provide username of the iDRAC user account that
+ is created, deleted, or modified.
+ type: str
+ required: true
+ user_password:
+ description:
+ - Password for the iDRAC user account that is created,
+ or modified. The password can be changed when the user
+ account is modified.
+ - To ensure security, the I(user_password) must be at least eight
+ characters long and must contain lowercase and upper-case
+ characters, numbers, and special characters.
+ type: str
+ new_user_name:
+ description: Provide the I(user_name) for the iDRAC user
+ account that is modified.
+ type: str
+ privilege:
+ description:
+ - Following are the role-based privileges.
+ - A user with C(Administrator) privilege can log in to iDRAC,
+ and then configure iDRAC, configure users, clear logs, control
+ and configure system, access virtual console, access virtual
+ media, test alerts, and execute debug commands.
+ - A user with C(Operator) privilege can log in to iDRAC, and then
+ configure iDRAC, control and configure system, access virtual
+ console, access virtual media, and execute debug commands.
+ - A user with C(ReadOnly) privilege can only log in to iDRAC.
+ - A user with C(None), no privileges assigned.
+ - Will be ignored, if I(custom_privilege) parameter is provided.
+ type: str
+ choices:
+ [
+ "Administrator",
+ "ReadOnly",
+ "Operator",
+ "None"
+ ]
+ custom_privilege:
+ description:
+ - Provide the custom role-based authority privileges allowed
+ for the user.
+ - To create a custom privilege, add up the privilege decimal
+ values as defined below.
+ Login - 1
+ Configure - 2
+ Configure Users - 4
+ Logs - 8
+ System Control - 16
+ Access Virtual Console - 32
+ Access Virtual Media - 64
+ System Operations - 128
+ Debug - 256
+ - The value has to be in the range 0-511.
+ type: int
+ ipmi_lan_privilege:
+ description: The Intelligent Platform Management
+ Interface LAN privilege level assigned to the user.
+ type: str
+ choices:
+ [
+ "Administrator",
+ "Operator",
+ "User",
+ "No Access"
+ ]
+ ipmi_serial_privilege:
+ description:
+ - The Intelligent Platform Management Interface Serial Port
+ privilege level assigned to the user.
+ - This option is only applicable for rack and tower servers.
+ type: str
+ choices:
+ [
+ "Administrator",
+ "Operator",
+ "User",
+ "No Access"
+ ]
+ enable:
+ description: Provide the option to enable or disable a user
+ from logging in to iDRAC.
+ type: bool
+ sol_enable:
+ description: Enables Serial Over Lan (SOL) for an iDRAC user.
+ type: bool
+ protocol_enable:
+ description: Enables SNMPv3 protocol for the iDRAC user.
+ type: bool
+ authentication_protocol:
+ description:
+ - This option allows to configure one of the following
+ authentication protocol types to authenticate the iDRAC user.
+ - Secure Hash Algorithm C(SHA).
+ - Message Digest 5 C(MD5).
+ - If C(None) is selected, then the authentication protocol
+ is not configured.
+ type: str
+ choices:
+ [
+ "None",
+ "SHA",
+ "MD5"
+ ]
+ privacy_protocol:
+ description:
+ - This option allows to configure one of the following
+ privacy encryption protocols for the iDRAC user.
+ - Data Encryption Standard C(DES).
+ - Advanced Encryption Standard C(AES).
+ - If C(None) is selected, then the privacy protocol
+ is not configured.
+ type: str
+ choices:
+ [
+ "None",
+ "DES",
+ "AES"
+ ]
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/meta/main.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/meta/main.yml
new file mode 100644
index 000000000..b2d809a64
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/meta/main.yml
@@ -0,0 +1,21 @@
+---
+galaxy_info:
+ author: Kritika-Bhateja
+ description: This role helps to manage local users for iDRAC.
+ company: Dell Technologies
+ license: GPL-3.0-only
+ min_ansible_version: "2.15.8"
+ platforms:
+ - name: EL
+ versions:
+ - "9"
+ - "8"
+ - name: Ubuntu
+ versions:
+ - jammy
+ - name: SLES
+ versions:
+ - "15SP3"
+ - "15SP4"
+ galaxy_tags: []
+dependencies: []
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152120/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152120/converge.yml
new file mode 100644
index 000000000..e2d712034
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152120/converge.yml
@@ -0,0 +1,49 @@
+---
+- name: Validate creating a user with all parameters
+ hosts: all
+ gather_facts: false
+ vars:
+ idrac_user_name: "moleculeTest"
+ tasks:
+ - name: Configure a new iDRAC user with all parameters except user_password
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: present
+ user_name: "{{ idrac_user_name }}"
+ user_password: 'test1234'
+ privilege: Administrator
+ ipmi_lan_privilege: Administrator
+ ipmi_serial_privilege: Administrator
+ enable: true
+ sol_enable: true
+ protocol_enable: true
+ authentication_protocol: SHA
+ privacy_protocol: AES
+ tags: molecule-idempotence-notest
+
+ - name: Asserting user creation in check mode
+ ansible.builtin.assert:
+ that: idrac_user_out.msg == "Changes found to commit!"
+ when: ansible_check_mode
+ tags: molecule-idempotence-notest
+
+ - name: Fetching user info in normal mode
+ ansible.builtin.include_tasks:
+ file: ../resources/idrac_user/get_user_info.yml
+ when: not ansible_check_mode and idrac_user_out.changed
+ tags: molecule-idempotence-notest
+
+ - name: Asserting user creation in normal mode
+ ansible.builtin.assert:
+ that:
+ - idrac_user_out.msg == "Successfully created user account."
+ - "{{ (user_details.user_info | length) != 0 }}"
+ - user_details.user_info[0].UserName == idrac_user_account.UserName
+ - user_details.user_info[0].RoleId == "Administrator"
+ when: not ansible_check_mode and idrac_user_out.changed
+ tags: molecule-idempotence-notest
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152120/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152120/molecule.yml
new file mode 100644
index 000000000..12be22020
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152120/molecule.yml
@@ -0,0 +1,5 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/idrac_user/cleanup.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152146/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152146/converge.yml
new file mode 100644
index 000000000..bff8a0d5b
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152146/converge.yml
@@ -0,0 +1,46 @@
+---
+- name: Validate creating a user with minimum required parameters
+ hosts: all
+ gather_facts: false
+ vars:
+ idrac_user_name: "moleculeTest"
+ idem_msg: "Requested changes are already present in the user slot."
+ tasks:
+ - name: Configure a new iDRAC user with all parameters except user_password
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: present
+ user_name: "{{ idrac_user_name }}"
+
+ - name: Asserting user creation in check mode
+ ansible.builtin.assert:
+ that: idrac_user_out.msg == "Changes found to commit!"
+ when: ansible_check_mode
+ tags: molecule-idempotence-notest
+
+ - name: Fetching user info in normal mode
+ ansible.builtin.include_tasks:
+ file: ../resources/idrac_user/get_user_info.yml
+ when: not ansible_check_mode and idrac_user_out.changed
+ tags: molecule-idempotence-notest
+
+ - name: Asserting user creation in normal mode
+ ansible.builtin.assert:
+ that:
+ - idrac_user_out.msg == "Successfully created user account."
+ - "{{ (user_details.user_info | length) != 0 }}"
+ - user_details.user_info[0].UserName == idrac_user_account.UserName
+ - user_details.user_info[0].RoleId == "None"
+ when: not ansible_check_mode and idrac_user_out.changed
+ tags: molecule-idempotence-notest
+
+ - name: Asserting user creation in idempotency mode
+ ansible.builtin.assert:
+ that:
+ - idrac_user_out.msg == idem_msg
+ when: not ansible_check_mode and not idrac_user_out.changed
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152146/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152146/molecule.yml
new file mode 100644
index 000000000..12be22020
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152146/molecule.yml
@@ -0,0 +1,5 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/idrac_user/cleanup.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152147/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152147/converge.yml
new file mode 100644
index 000000000..d96518aae
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152147/converge.yml
@@ -0,0 +1,32 @@
+---
+- name: Verifying custom_privilege has precedence over privilege parameters
+ hosts: all
+ gather_facts: false
+ vars:
+ idrac_user_name: "moleculeTest"
+ roles:
+ - role: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: present
+ user_name: "{{ idrac_user_name }}"
+ user_password: '#testABCdef1234!'
+ privilege: Operator
+ custom_privilege: 1
+
+ tasks:
+ - name: Fetching user info in normal mode
+ ansible.builtin.include_tasks:
+ file: ../resources/idrac_user/get_user_info.yml
+
+ - name: Asserting user creation in normal mode
+ ansible.builtin.assert:
+ that:
+ - idrac_user_out.msg == "Successfully created user account."
+ - "{{ (user_details.user_info | length) != 0 }}"
+ - user_details.user_info[0].UserName == idrac_user_account.UserName
+ - user_details.user_info[0].RoleId == "ReadOnly"
+ - idrac_user_out.changed
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152147/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152147/molecule.yml
new file mode 100644
index 000000000..326227dee
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152147/molecule.yml
@@ -0,0 +1,10 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/idrac_user/cleanup.yml
+scenario:
+ test_sequence:
+ - cleanup
+ - converge
+ - destroy
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152148/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152148/converge.yml
new file mode 100644
index 000000000..f5044b244
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152148/converge.yml
@@ -0,0 +1,54 @@
+---
+- name: Validate modifying a user with all parameters
+ hosts: all
+ gather_facts: false
+ vars:
+ idrac_user_name: "moleculeTest"
+ idem_msg: "Requested changes are already present in the user slot."
+ tasks:
+ - name: Modify iDRAC user with all parameters
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: present
+ user_name: "{{ idrac_user_name }}"
+ privilege: Operator
+ ipmi_lan_privilege: User
+ ipmi_serial_privilege: User
+ enable: true
+ sol_enable: true
+ protocol_enable: true
+ authentication_protocol: SHA
+ privacy_protocol: AES
+
+ - name: Asserting user creation in check mode
+ ansible.builtin.assert:
+ that: idrac_user_out.msg == "Changes found to commit!"
+ when: ansible_check_mode
+ tags: molecule-idempotence-notest
+
+ - name: Fetching user info in normal mode
+ ansible.builtin.include_tasks:
+ file: ../resources/idrac_user/get_user_info.yml
+ when: not ansible_check_mode and idrac_user_out.changed
+ tags: molecule-idempotence-notest
+
+ - name: Asserting user creation in normal mode
+ ansible.builtin.assert:
+ that:
+ - idrac_user_out.msg == "Successfully updated user account."
+ - "{{ (user_details.user_info | length) != 0 }}"
+ - user_details.user_info[0].UserName == idrac_user_account.UserName
+ - user_details.user_info[0].RoleId == "Operator"
+ when: not ansible_check_mode and idrac_user_out.changed
+ tags: molecule-idempotence-notest
+
+ - name: Asserting user creation in Idempotency mode
+ ansible.builtin.assert:
+ that:
+ - idrac_user_out.msg == idem_msg
+ when: not ansible_check_mode and not idrac_user_out.changed
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152148/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152148/molecule.yml
new file mode 100644
index 000000000..12be22020
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152148/molecule.yml
@@ -0,0 +1,5 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/idrac_user/cleanup.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152148/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152148/prepare.yml
new file mode 100644
index 000000000..29621fd56
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152148/prepare.yml
@@ -0,0 +1,26 @@
+---
+- name: Prepare
+ hosts: all
+ gather_facts: false
+ vars:
+ idrac_user_name: "moleculeTest"
+ tasks:
+ - name: Configure a new iDRAC user with all parameters
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: present
+ user_name: "{{ idrac_user_name }}"
+ user_password: 'test1234'
+ privilege: ReadOnly
+ ipmi_lan_privilege: Operator
+ ipmi_serial_privilege: No Access
+ enable: true
+ sol_enable: true
+ protocol_enable: true
+ authentication_protocol: SHA
+ privacy_protocol: AES
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152149/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152149/converge.yml
new file mode 100644
index 000000000..19ee7d60e
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152149/converge.yml
@@ -0,0 +1,45 @@
+---
+- name: Validate creating a user with minimum required parameters with env
+ hosts: all
+ gather_facts: false
+ vars:
+ idrac_user_name: "moleculeTest"
+ idem_msg: "Requested changes are already present in the user slot."
+ tasks:
+ - name: Modify iDRAC user with minimum required parameters
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ validate_certs: false
+ state: present
+ user_name: "{{ idrac_user_name }}"
+ custom_privilege: 511
+
+ - name: Asserting user creation in check mode
+ ansible.builtin.assert:
+ that: idrac_user_out.msg == "Changes found to commit!"
+ when: ansible_check_mode
+ tags: molecule-idempotence-notest
+
+ - name: Fetching user info in normal mode
+ ansible.builtin.include_tasks:
+ file: ../resources/idrac_user/get_user_info.yml
+ when: not ansible_check_mode and idrac_user_out.changed
+ tags: molecule-idempotence-notest
+
+ - name: Asserting user creation in normal mode
+ ansible.builtin.assert:
+ that:
+ - idrac_user_out.msg == "Successfully updated user account."
+ - "{{ (user_details.user_info | length) != 0 }}"
+ - user_details.user_info[0].UserName == idrac_user_account.UserName
+ - user_details.user_info[0].RoleId == "Administrator"
+ when: not ansible_check_mode and idrac_user_out.changed
+ tags: molecule-idempotence-notest
+
+ - name: Asserting user creation in idempotency mode
+ ansible.builtin.assert:
+ that:
+ - idrac_user_out.msg == idem_msg
+ when: not ansible_check_mode and not idrac_user_out.changed
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152149/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152149/molecule.yml
new file mode 100644
index 000000000..12be22020
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152149/molecule.yml
@@ -0,0 +1,5 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/idrac_user/cleanup.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152149/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152149/prepare.yml
new file mode 100644
index 000000000..11a785503
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152149/prepare.yml
@@ -0,0 +1,19 @@
+---
+- name: Prepare
+ hosts: all
+ gather_facts: false
+ vars:
+ idrac_user_name: "moleculeTest"
+ tasks:
+ - name: Configure a new iDRAC user with minimum required parameters
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: present
+ user_name: "{{ idrac_user_name }}"
+ user_password: "#1234Abc"
+ custom_privilege: 0
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152150/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152150/converge.yml
new file mode 100644
index 000000000..672912bc3
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152150/converge.yml
@@ -0,0 +1,51 @@
+---
+- name: Validate deleting a user
+ hosts: all
+ gather_facts: false
+ vars:
+ idrac_user_name: "moleculeTest"
+ tasks:
+ - name: Delete iDRAC user
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: absent
+ user_name: "{{ idrac_user_name }}"
+
+ - name: Asserting user deletion in check mode
+ ansible.builtin.assert:
+ that: idrac_user_out.msg == "Changes found to commit!"
+ when: ansible_check_mode
+ tags: molecule-idempotence-notest
+
+ - name: Fetching user info in normal mode
+ ansible.builtin.include_tasks:
+ file: ../resources/idrac_user/get_user_info.yml
+ vars:
+ idrac_user_name: "{{ default(omit) }}"
+ when: not ansible_check_mode and idrac_user_out.changed
+ tags: molecule-idempotence-notest
+
+ - name: Extracting username
+ ansible.builtin.set_fact:
+ user_list: "{{ user_details.user_info | map(attribute='UserName') }}"
+ when: not ansible_check_mode and idrac_user_out.changed
+ tags: molecule-idempotence-notest
+
+ - name: Asserting user deletion in normal mode
+ ansible.builtin.assert:
+ that:
+ - idrac_user_out.msg == "Successfully deleted user account."
+ - "{{ idrac_user_name not in user_list }}"
+ when: not ansible_check_mode and idrac_user_out.changed
+ tags: molecule-idempotence-notest
+
+ - name: Asserting user deletion in idempotency mode
+ ansible.builtin.assert:
+ that:
+ - idrac_user_out.msg == "The user account is absent."
+ when: not ansible_check_mode and not idrac_user_out.changed
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152150/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152150/molecule.yml
new file mode 100644
index 000000000..12be22020
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152150/molecule.yml
@@ -0,0 +1,5 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/idrac_user/cleanup.yml
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152150/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152150/prepare.yml
new file mode 100644
index 000000000..69b4ba6ab
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/TC-152150/prepare.yml
@@ -0,0 +1,19 @@
+---
+- name: Prepare
+ hosts: all
+ gather_facts: false
+ vars:
+ idrac_user_name: "moleculeTest"
+ tasks:
+ - name: Configure a new iDRAC user with minimum required parameters
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: present
+ user_name: "{{ idrac_user_name }}"
+ user_password: "#1234Abc"
+ custom_privilege: 65
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/default/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/default/converge.yml
new file mode 100644
index 000000000..c0e0e96d2
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/default/converge.yml
@@ -0,0 +1,163 @@
+---
+- name: TC-152151 - Validate all negative scenarios
+ hosts: all
+ gather_facts: false
+ vars:
+ idrac_user_name: "moleculeTest"
+ max_user: 16
+ tasks:
+ - name: Invalid hostname value
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "invalid"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: present
+ user_name: "{{ idrac_user_name }}"
+ ignore_errors: true
+ ignore_unreachable: true
+ register: idrac_user_fail_case
+
+ - name: Asserting for invalid hostname
+ ansible.builtin.assert:
+ that: >
+ idrac_user_out.msg == "<urlopen error [Errno -2]
+ Name or service not known>"
+
+ - name: Invalid username value
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "invalid"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: present
+ user_name: "{{ idrac_user_name }}"
+ ignore_errors: true
+ ignore_unreachable: true
+ register: idrac_user_fail_case
+
+ - name: Asserting for invalid username
+ ansible.builtin.assert:
+ that:
+ - '"HTTP Error 401" in idrac_user_out.msg'
+
+ - name: Invalid password value
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "invalid"
+ validate_certs: false
+ state: present
+ user_name: "{{ idrac_user_name }}"
+ ignore_errors: true
+ ignore_unreachable: true
+ register: idrac_user_fail_case
+
+ - name: Asserting for invalid password
+ ansible.builtin.assert:
+ that:
+ - '"HTTP Error 401" in idrac_user_out.msg'
+
+ - name: Invalid validate_certs value
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: invalid
+ state: present
+ user_name: "{{ idrac_user_name }}"
+ ignore_errors: true
+ ignore_unreachable: true
+ register: idrac_user_fail_case
+
+ - name: Asserting for invalid validate_certs
+ ansible.builtin.assert:
+ that:
+ - '"not a valid boolean" in idrac_user_out.msg'
+
+ - name: Invalid ca_path value
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ ca_path: invalid
+ state: present
+ user_name: "{{ idrac_user_name }}"
+ ignore_errors: true
+ ignore_unreachable: true
+ register: idrac_user_fail_case
+
+ - name: Asserting for invalid ca_path
+ ansible.builtin.assert:
+ that:
+ - idrac_user_out.failed
+
+ - name: Invalid username value
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: present
+ user_name: "12345@Dell"
+ ignore_errors: true
+ ignore_unreachable: true
+ register: idrac_user_fail_case
+
+ - name: Asserting for invalid username
+ ansible.builtin.assert:
+ that:
+ - '"HTTP Error 400" in idrac_user_out.msg'
+
+ - name: Delete an idrac_user by giving invalid user_name
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: absent
+ user_name: "12345Dell"
+ ignore_errors: true
+ ignore_unreachable: true
+ register: idrac_user_fail_case
+
+ - name: Asserting for deletion of an idrac_user by giving invalid user_name
+ ansible.builtin.assert:
+ that:
+ - idrac_user_out.msg == "The user account is absent."
+
+ - name: Invalid value for "custom_priviledge"
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: present
+ user_name: "{{ idrac_user_name }}"
+ user_password: "#123Dell456!"
+ custom_privilege: -1
+ ignore_errors: true
+ ignore_unreachable: true
+ register: idrac_user_fail_case
+
+ - name: Asserting for invalid value for "custom_priviledge"
+ ansible.builtin.assert:
+ that: >
+ idrac_user_out.msg == "custom_privilege value should be from 0 to
+ 511."
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/default/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/default/molecule.yml
new file mode 100644
index 000000000..326227dee
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/default/molecule.yml
@@ -0,0 +1,10 @@
+---
+provisioner:
+ name: ansible
+ playbooks:
+ cleanup: ../resources/idrac_user/cleanup.yml
+scenario:
+ test_sequence:
+ - cleanup
+ - converge
+ - destroy
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/resources/idrac_user/cleanup.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/resources/idrac_user/cleanup.yml
new file mode 100644
index 000000000..6b1aea81f
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/resources/idrac_user/cleanup.yml
@@ -0,0 +1,17 @@
+---
+- name: Cleanup
+ hosts: all
+ gather_facts: false
+ vars:
+ idrac_user_name: "moleculeTest"
+ tasks:
+ - name: Deleting iDRAC user
+ ansible.builtin.import_role:
+ name: idrac_user
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: absent
+ user_name: "{{ idrac_user_name }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/resources/idrac_user/get_user_info.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/resources/idrac_user/get_user_info.yml
new file mode 100644
index 000000000..c8151b698
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/molecule/resources/idrac_user/get_user_info.yml
@@ -0,0 +1,9 @@
+---
+- name: Fetching user info
+ dellemc.openmanage.idrac_user_info:
+ idrac_ip: "{{ lookup('env', 'IDRAC_IP') }}"
+ idrac_user: "{{ lookup('env', 'IDRAC_USER') }}"
+ idrac_password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ username: "{{ idrac_user_name | default(omit) }}"
+ register: user_details
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/absent.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/absent.yml
new file mode 100644
index 000000000..619d70281
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/absent.yml
@@ -0,0 +1,14 @@
+---
+- name: Delete existing iDRAC user account
+ dellemc.openmanage.idrac_user:
+ idrac_ip: "{{ hostname }}"
+ idrac_user: "{{ username | default(omit) }}"
+ idrac_password: "{{ password | default(omit) }}"
+ idrac_port: "{{ https_port }}"
+ ca_path: "{{ ca_path | default(omit) }}"
+ validate_certs: "{{ validate_certs }}"
+ state: "{{ state }}"
+ user_name: "{{ user_name }}"
+ timeout: "{{ https_timeout }}"
+ register: idrac_user_out
+ delegate_to: "{{ idrac_user_delegate }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/get_user.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/get_user.yml
new file mode 100644
index 000000000..669743cc2
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/get_user.yml
@@ -0,0 +1,22 @@
+---
+- name: Set user_name based on new_user_name
+ ansible.builtin.set_fact:
+ idrac_user_present_name: "{{ new_user_name if new_user_name is defined else user_name }}"
+
+- name: Fetch user account details
+ dellemc.openmanage.idrac_user_info:
+ idrac_ip: "{{ hostname }}"
+ idrac_user: "{{ username | default(omit) }}"
+ idrac_password: "{{ password | default(omit) }}"
+ idrac_port: "{{ https_port }}"
+ ca_path: "{{ ca_path | default(omit) }}"
+ validate_certs: "{{ validate_certs }}"
+ username: "{{ idrac_user_present_name }}"
+ timeout: "{{ https_timeout }}"
+ register: idrac_user_account
+ delegate_to: "{{ idrac_user_delegate }}"
+
+- name: Extracting user account details
+ ansible.builtin.set_fact:
+ idrac_user_account: "{{ idrac_user_account.user_info[0] |
+ ansible.utils.remove_keys('Keys') }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/main.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/main.yml
new file mode 100644
index 000000000..2ba2f734f
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/main.yml
@@ -0,0 +1,10 @@
+---
+# tasks file for idrac_user
+
+- name: Create or modify user
+ ansible.builtin.include_tasks: present.yml
+ when: state == "present"
+
+- name: Delete user
+ ansible.builtin.include_tasks: absent.yml
+ when: state == "absent"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/present.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/present.yml
new file mode 100644
index 000000000..3997f441b
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/tasks/present.yml
@@ -0,0 +1,29 @@
+---
+- name: Create or modify a iDRAC user
+ dellemc.openmanage.idrac_user:
+ idrac_ip: "{{ hostname }}"
+ idrac_user: "{{ username | default(omit) }}"
+ idrac_password: "{{ password | default(omit) }}"
+ idrac_port: "{{ https_port }}"
+ ca_path: "{{ ca_path | default(omit) }}"
+ validate_certs: "{{ validate_certs }}"
+ state: "{{ state }}"
+ user_name: "{{ user_name }}"
+ user_password: "{{ user_password | default(omit) }}"
+ new_user_name: "{{ new_user_name | default(omit) }}"
+ privilege: "{{ privilege | default(omit) }}"
+ custom_privilege: "{{ custom_privilege | default(omit) }}"
+ ipmi_lan_privilege: "{{ ipmi_lan_privilege | default(omit) }}"
+ ipmi_serial_privilege: "{{ ipmi_serial_privilege | default(omit) }}"
+ enable: "{{ enable | default(omit) }}"
+ sol_enable: "{{ sol_enable | default(omit) }}"
+ protocol_enable: "{{ protocol_enable | default(omit) }}"
+ authentication_protocol: "{{ authentication_protocol | default(omit) }}"
+ privacy_protocol: "{{ privacy_protocol | default(omit) }}"
+ timeout: "{{ https_timeout }}"
+ register: idrac_user_out
+ delegate_to: "{{ idrac_user_delegate }}"
+
+- name: Fetch user account information
+ ansible.builtin.include_tasks: get_user.yml
+ when: idrac_user_out.changed and not ansible_check_mode
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/tests/inventory b/ansible_collections/dellemc/openmanage/roles/idrac_user/tests/inventory
new file mode 100644
index 000000000..878877b07
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/tests/inventory
@@ -0,0 +1,2 @@
+localhost
+
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/tests/test.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/tests/test.yml
new file mode 100644
index 000000000..a3a34d106
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/tests/test.yml
@@ -0,0 +1,6 @@
+---
+- name: Executing idrac user
+ hosts: localhost
+ remote_user: root
+ roles:
+ - idrac_user
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_user/vars/main.yml b/ansible_collections/dellemc/openmanage/roles/idrac_user/vars/main.yml
new file mode 100644
index 000000000..86d2a24e3
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_user/vars/main.yml
@@ -0,0 +1,12 @@
+---
+# vars file for idrac_user
+idrac_user_delegate:
+ "{{ lookup('ansible.builtin.env', 'RUNON', default='localhost') }}"
+idrac_user_uri_headers:
+ Accept: "application/json"
+ Content-Type: "application/json"
+idrac_user_uri_body_format: "json"
+idrac_user_uri_status_code: 200
+idrac_user_uri_return_content: true
+idrac_user_force_basic_auth: true
+idrac_user_uri_method: GET
diff --git a/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/__create_virtual_drive.yml b/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/__create_virtual_drive.yml
new file mode 100644
index 000000000..664d596e6
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/__create_virtual_drive.yml
@@ -0,0 +1,29 @@
+---
+- name: Setting controller ID
+ ansible.builtin.set_fact:
+ redfish_storage_volume_controller_id: "{{ lookup('env', 'CONTROLLER_ID') }}"
+
+- name: Pre-req Create a volume.
+ ansible.builtin.import_role:
+ name: redfish_storage_volume
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: present
+ raid_type: "RAID0"
+ name: "VD" # noqa: var-naming[no-reserved]
+ controller_id: "{{ redfish_storage_volume_controller_id }}"
+ drives: "{{ lookup('env', 'PHYSICAL_DISK') }}"
+ job_wait: true
+ check_mode: false
+
+- name: Fetching Volume_id from iDRAC
+ ansible.builtin.include_tasks: ../__get_helper.yml
+ vars:
+ url: "Systems/System.Embedded.1/Storage/{{ redfish_storage_volume_controller_id }}/Volumes"
+
+- name: Extracting volume_id
+ ansible.builtin.set_fact:
+ redfish_storage_volume_id: "{{ (redfish_storage_volume_fetched_output.json.Members | last)['@odata.id'] | ansible.builtin.split('/') | last }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/__job_track.yml b/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/__job_track.yml
new file mode 100644
index 000000000..4feafacaf
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/__job_track.yml
@@ -0,0 +1,17 @@
+---
+- name: Track job
+ ansible.builtin.uri:
+ url: "https://{{ lookup('env', 'IDRAC_IP') }}/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/Jobs/{{ job_id }}"
+ user: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ method: GET
+ force_basic_auth: true
+ validate_certs: false
+ status_code: 200
+ headers: 'Accept=application/json'
+ register: redfish_storage_volume_job_wait
+ until: redfish_storage_volume_job_wait.json.JobState == "Completed" or redfish_storage_volume_job_wait.json.JobState == "Failed"
+ failed_when: redfish_storage_volume_job_wait.json.JobState == "Failed"
+ retries: 20
+ delay: 30
+ check_mode: false
diff --git a/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/converge.yml b/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/converge.yml
index 72b2e5977..221fe3bbd 100644
--- a/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/converge.yml
@@ -8,7 +8,7 @@
ansible.builtin.import_role:
name: redfish_storage_volume
vars:
- hostname: "{{ lookup('env', 'INVALID_IDRAC_IP') }}"
+ hostname: "192.168.1.1"
username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
@@ -23,6 +23,7 @@
job_wait: false
ignore_errors: true
register: redfish_storage_volume_result
+ ignore_unreachable: true
- name: Asserting after performing operation.
ansible.builtin.assert:
@@ -35,7 +36,7 @@
vars:
hostname: "{{ lookup('env', 'IDRAC_IP') }}"
username: "{{ lookup('env', 'IDRAC_USER') }}"
- password: "{{ lookup('env', 'INVALID_IDRAC_PASSWORD') }}"
+ password: "invalid_password"
validate_certs: false
state: present
volume_type: "NonRedundant"
@@ -78,6 +79,7 @@
ansible.builtin.assert:
that: |-
redfish_storage_volume_out.msg == "HTTP Error 400: Bad Request"
+ when: not ansible_check_mode
- name: To check the behaviour of invalid certificate path.
ansible.builtin.import_role:
@@ -87,7 +89,7 @@
username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: true
- ca_path: "{{ lookup('env', 'INVALID_CERT_PATH') }}"
+ ca_path: "/tmp/invalid_ca_path.pem"
state: present
volume_type: "SpannedStripesWithParity"
name: "VD" # noqa: var-naming[no-reserved]
@@ -99,6 +101,7 @@
job_wait: false
ignore_errors: true
register: redfish_storage_volume_result
+ ignore_unreachable: true
- name: Asserting after performing operation for invalid certificate path.
ansible.builtin.assert:
@@ -127,6 +130,6 @@
- name: Asserting after performing operation for invalid volume type.
ansible.builtin.assert:
- that: |-
+ that: >
redfish_storage_volume_out.msg == "value of volume_type must be one of: NonRedundant, Mirrored,
StripedWithParity, SpannedMirrors, SpannedStripesWithParity, got: InvalidMirrored"
diff --git a/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/initialization/converge.yml b/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/initialization/converge.yml
index a76faebd4..4c0bdeba1 100644
--- a/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/initialization/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/initialization/converge.yml
@@ -13,43 +13,65 @@
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
command: initialize
- volume_id: "{{ lookup('env', 'INVALID_VOLUME_ID') }}"
+ volume_id: "invalid_volume_id"
initialize_type: "Fast"
ignore_errors: true
register: redfish_storage_volume_result
- name: Asserting operation for initialization of type Fast.
ansible.builtin.assert:
- that: redfish_storage_volume_out.msg == "Specified Volume Id Disk.Virtual.0:RAID.Mezzanine.1C-1-test does not exist in the System."
+ that: redfish_storage_volume_out.msg == "Specified Volume Id invalid_volume_id does not exist in the System."
- - name: To check the behaviour of Initialization type Fast.
- ansible.builtin.import_role:
- name: redfish_storage_volume
- vars:
- hostname: "{{ lookup('env', 'IDRAC_IP') }}"
- username: "{{ lookup('env', 'IDRAC_USER') }}"
- password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
- validate_certs: false
- command: initialize
- volume_id: "{{ lookup('env', 'VOLUME_ID') }}"
- initialize_type: "Fast"
+ - name: Running for Initialization type scenarios
+ block:
+ - name: Pre-req Create a volume.
+ ansible.builtin.include_tasks:
+ file: ../__create_virtual_drive.yml
- - name: Asserting operation for initialization type Fast.
- ansible.builtin.assert:
- that: redfish_storage_volume_out.msg == "Successfully submitted initialize volume task."
+ - name: To check the behaviour of Initialization type Fast.
+ ansible.builtin.import_role:
+ name: redfish_storage_volume
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ command: initialize
+ volume_id: "{{ redfish_storage_volume_id }}"
+ initialize_type: "Fast"
+ job_wait: false
- - name: To check the behaviour of Initialization type Slow.
- ansible.builtin.import_role:
- name: redfish_storage_volume
- vars:
- hostname: "{{ lookup('env', 'IDRAC_IP') }}"
- username: "{{ lookup('env', 'IDRAC_USER') }}"
- password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
- validate_certs: false
- command: initialize
- volume_id: "{{ lookup('env', 'VOLUME_ID') }}"
- initialize_type: "Slow"
+ - name: Asserting operation for initialization type Fast.
+ ansible.builtin.assert:
+ that: redfish_storage_volume_out.msg == "Successfully submitted initialize volume task."
- - name: Asserting operation for initialization type Slow.
- ansible.builtin.assert:
- that: redfish_storage_volume_out.msg == "Successfully submitted initialize volume task."
+ - name: Track the initialization job
+ ansible.builtin.include_tasks:
+ file: ../__job_track.yml
+ vars:
+ job_id: "{{ redfish_storage_volume_out.job_status.Id }}"
+
+ - name: To check the behaviour of Initialization type Slow.
+ ansible.builtin.import_role:
+ name: redfish_storage_volume
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ command: initialize
+ volume_id: "{{ redfish_storage_volume_id }}"
+ initialize_type: "Slow"
+ job_wait: true
+
+ - name: Asserting operation for initialization type Slow.
+ ansible.builtin.assert:
+ that: redfish_storage_volume_out.msg == "The job is successfully completed."
+
+ always:
+ - name: Deleting VD
+ ansible.builtin.include_tasks:
+ file: ../__delete_virtual_drive.yml
+ when:
+ - not ansible_check_mode
+ - redfish_storage_volume_id is defined
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_idrac_redfish.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_idrac_redfish.py
index fc3b3543d..8d83057e9 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_idrac_redfish.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_idrac_redfish.py
@@ -44,7 +44,7 @@ class TestIdracRedfishRest(object):
@pytest.fixture
def module_params(self):
- module_parameters = {'idrac_ip': '192.168.0.1', 'idrac_user': 'username',
+ module_parameters = {'idrac_ip': 'xxx.xxx.x.x', 'idrac_user': 'username',
'idrac_password': 'password', 'idrac_port': '443'}
return module_parameters
@@ -125,7 +125,7 @@ class TestIdracRedfishRest(object):
])
def test_build_url(self, query_params, mocker, idrac_redfish_object):
"""builds complete url"""
- base_uri = 'https://192.168.0.1:443/api'
+ base_uri = 'https://xxx.xxx.x.x:443/api'
path = "/AccountService/Accounts"
mocker.patch(MODULE_UTIL_PATH + 'idrac_redfish.iDRACRedfishAPI._get_url',
return_value=base_uri + path)
@@ -137,7 +137,7 @@ class TestIdracRedfishRest(object):
def test_build_url_none(self, mocker, idrac_redfish_object):
"""builds complete url"""
- base_uri = 'https://192.168.0.1:443/api'
+ base_uri = 'https://xxx.xxx.x.x:443/api'
mocker.patch(MODULE_UTIL_PATH + 'redfish.Redfish._get_base_url',
return_value=base_uri)
url = idrac_redfish_object._build_url("", None)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py
index 93892a744..60c5341a1 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_ome.py
@@ -47,7 +47,7 @@ class TestOMERest(object):
@pytest.fixture
def module_params(self):
- module_parameters = {'hostname': '192.168.0.1', 'username': 'username',
+ module_parameters = {'hostname': 'xxx.xxx.x.x', 'username': 'username',
'password': 'password', "port": 443}
return module_parameters
@@ -150,7 +150,7 @@ class TestOMERest(object):
])
def test_build_url(self, query_param, mocker, module_params):
"""builds complete url"""
- base_uri = 'https://192.168.0.1:443/api'
+ base_uri = 'https://xxx.xxx.x.x:443/api'
path = "AccountService/Accounts"
mocker.patch(MODULE_UTIL_PATH + 'ome.RestOME._get_base_url',
return_value=base_uri)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_redfish.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_redfish.py
index 2e092af15..1dd3ab8b4 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_redfish.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_redfish.py
@@ -39,7 +39,7 @@ class TestRedfishRest(object):
@pytest.fixture
def module_params(self):
- module_parameters = {'baseuri': '192.168.0.1:443', 'username': 'username',
+ module_parameters = {'baseuri': 'xxx.xxx.x.x:443', 'username': 'username',
'password': 'password'}
return module_parameters
@@ -120,7 +120,7 @@ class TestRedfishRest(object):
])
def test_build_url(self, query_params, mocker, redfish_object):
"""builds complete url"""
- base_uri = 'https://192.168.0.1:443/api'
+ base_uri = 'https://xxx.xxx.x.x:443/api'
path = "/AccountService/Accounts"
mocker.patch(MODULE_UTIL_PATH + 'redfish.Redfish._get_base_url',
return_value=base_uri)
@@ -132,7 +132,7 @@ class TestRedfishRest(object):
def test_build_url_none(self, mocker, redfish_object):
"""builds complete url"""
- base_uri = 'https://192.168.0.1:443/api'
+ base_uri = 'https://xxx.xxx.x.x:443/api'
mocker.patch(MODULE_UTIL_PATH + 'redfish.Redfish._get_base_url',
return_value=base_uri)
url = redfish_object._build_url("", None)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_session_utils.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_session_utils.py
new file mode 100644
index 000000000..c53c81b01
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/module_utils/test_session_utils.py
@@ -0,0 +1,415 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 9.2.0
+# Copyright (C) 2024 Dell Inc.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# All rights reserved. Dell, EMC, and other trademarks are trademarks of Dell Inc. or its subsidiaries.
+# Other trademarks may be trademarks of their respective owners.
+#
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import os
+import json
+import pytest
+from mock import MagicMock
+from ansible.module_utils.urls import SSLValidationError
+from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
+from ansible_collections.dellemc.openmanage.plugins.module_utils.session_utils import SessionAPI, OpenURLResponse
+
+MODULE_UTIL_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.'
+OPEN_URL = 'session_utils.open_url'
+TEST_PATH = "/testpath"
+INVOKE_REQUEST = 'session_utils.SessionAPI.invoke_request'
+JOB_COMPLETE = 'session_utils.SessionAPI.wait_for_job_complete'
+API_TASK = '/api/tasks'
+SLEEP_TIME = 'session_utils.time.sleep'
+
+
+class TestSessionRest(object):
+ """
+ Main class for testing the SessionUtils class.
+ """
+ @pytest.fixture
+ def mock_response(self):
+ """
+ Returns a MagicMock object representing a mock HTTP response.
+
+ The mock response has the following properties:
+ - `getcode()` method returns 200
+ - `headers` property is a dictionary containing the headers of the response
+ - `getheaders()` method returns the same dictionary as `headers`
+ - `read()` method returns a JSON string representing a dictionary with a "value" key and
+ "data" as its value
+
+ :return: A MagicMock object representing a mock HTTP response.
+ :rtype: MagicMock
+ """
+ mock_response = MagicMock()
+ mock_response.getcode.return_value = 200
+ mock_response.headers = mock_response.getheaders.return_value = {
+ 'X-Auth-Token': 'token_id'}
+ mock_response.read.return_value = json.dumps({"value": "data"})
+ return mock_response
+
+ @pytest.fixture
+ def module_params(self):
+ """
+ Fixture that returns a dictionary containing module parameters.
+
+ :return: A dictionary with the following keys:
+ - 'hostname': The hostname of the module.
+ - 'username': The username for authentication.
+ - 'password': The password for authentication.
+ - 'port': The port number for the module.
+ """
+ module_parameters = {'hostname': 'xxx.xxx.x.x', 'username': 'username',
+ 'password': 'password', 'port': '443'}
+ return module_parameters
+
+ @pytest.fixture
+ def session_utils_object(self, module_params):
+ """
+ Creates a SessionAPI object using the provided `module_params` and returns it.
+
+ :param module_params: A dictionary containing the parameters for the SessionAPI object.
+ :type module_params: dict
+ :return: A SessionAPI object.
+ :rtype: SessionAPI
+ """
+ session_utils_obj = SessionAPI(module_params)
+ return session_utils_obj
+
+ def test_invoke_request_with_session(self, mock_response, mocker, module_params):
+ """
+ Test the invoke_request method of the SessionAPI class with a session.
+
+ Args:
+ mock_response (MagicMock): A mocked response object.
+ mocker (MockerFixture): A fixture for mocking objects.
+ module_params (dict): The parameters for the module.
+
+ Returns:
+ None
+
+ Assertions:
+ - Asserts that the response status code is 200.
+ - Asserts that the response JSON data is {"value": "data"}.
+ - Asserts that the response success attribute is True.
+ """
+ mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ return_value=mock_response)
+ obj = SessionAPI(module_params)
+ response = obj.invoke_request(TEST_PATH, "GET")
+ assert response.status_code == 200
+ assert response.json_data == {"value": "data"}
+ assert response.success is True
+
+ def test_invoke_request_without_session(self, mock_response, mocker):
+ """
+ Test the `invoke_request` method of the `SessionAPI` class without using a session.
+
+ This test case mocks the `open_url` function from the `MODULE_UTIL_PATH` module to return a
+ mock response.
+ It then creates an instance of the `SessionAPI` class with mock module parameters.
+ The `invoke_request` method is called with a test path and a GET method.
+ The test asserts that the response status code is 200, the response JSON data is
+ {"value": "data"},
+ and the response success flag is True.
+
+ Parameters:
+ - mock_response (MagicMock): A mock response object to be returned by the `open_url`
+ function.
+ - mocker (MockerFixture): A fixture provided by the pytest library for mocking
+ functions.
+
+ Returns:
+ None
+ """
+ mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ return_value=mock_response)
+ module_params = {'hostname': 'XXXX:XXXX:XXXX:XXXX:XXXX:XXXX:XXXX:XXXX', 'username':
+ 'username',
+ 'password': 'password', "port": '443'}
+ obj = SessionAPI(module_params)
+ response = obj.invoke_request(TEST_PATH, "GET")
+ assert response.status_code == 200
+ assert response.json_data == {"value": "data"}
+ assert response.success is True
+
+ def test_invoke_request_without_session_with_header(self, mock_response, mocker,
+ module_params):
+ """
+ Test the `invoke_request` method of the `SessionAPI` class when a session is not used and a
+ header is provided.
+
+ This test method mocks the `open_url` function from the `module_utils` module to return a
+ mock response object. It then creates an instance of the `SessionAPI` class with the
+ provided `module_params`. The `invoke_request` method is called with a test path, a request
+ method of "POST", and a headers dictionary containing a single key-value pair.
+
+ The test asserts that the response status code is 200, the response JSON data is
+ `{"value": "data"}`, and the response success flag is `True`.
+
+ Parameters:
+ - `mock_response` (MagicMock): A mock response object to be returned by the `open_url`
+ function.
+ - `mocker` (MockerFixture): A fixture for patching and mocking objects.
+ - `module_params` (dict): A dictionary containing the module parameters.
+
+ Returns:
+ None
+ """
+ mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ return_value=mock_response)
+ obj = SessionAPI(module_params)
+ response = obj.invoke_request(TEST_PATH, "POST", headers={"application": "octstream"})
+ assert response.status_code == 200
+ assert response.json_data == {"value": "data"}
+ assert response.success is True
+
+ @pytest.mark.parametrize("exc", [URLError, SSLValidationError, ConnectionError])
+ def test_invoke_request_error_case_handling(self, exc, mocker, module_params):
+ """
+ Test the error handling in the `invoke_request` method of the `SessionAPI` class.
+
+ This function tests the handling of different types of exceptions that can occur during an
+ HTTP request. It uses the `pytest.mark.parametrize` decorator to run the test multiple
+ times with different exception types. The test mocks the `open_url` method of the
+ `SessionAPI` class to raise the specified exception. It then asserts that the correct
+ exception is raised when calling the `invoke_request` method.
+
+ Args:
+ exc (Exception): The exception type to test.
+ mocker (MockerFixture): The mocker fixture used for mocking dependencies.
+ module_params (dict): The parameters for the `SessionAPI` object.
+
+ Raises:
+ exc: The specified exception type if it is raised during the `invoke_request` call.
+ """
+ mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ side_effect=exc("test"))
+ with pytest.raises(exc):
+ obj = SessionAPI(module_params)
+ obj.invoke_request(TEST_PATH, "GET")
+
+ def test_invoke_request_http_error_handling(self, mock_response, mocker, module_params):
+ """
+ Test the HTTP error handling in the `invoke_request` method of the `SessionAPI` class.
+
+ Args:
+ mock_response (Mock): A mock object representing the response from the HTTP request.
+ mocker (MockerFixture): A fixture for mocking objects.
+ module_params (dict): The parameters for the module.
+
+ Raises:
+ HTTPError: If an HTTP error occurs during the invocation of the request.
+
+ Returns:
+ None
+ """
+ open_url_mock = mocker.patch(MODULE_UTIL_PATH + OPEN_URL,
+ return_value=mock_response)
+ open_url_mock.side_effect = HTTPError('https://testhost.com/', 400,
+ 'Bad Request Error', {}, None)
+ with pytest.raises(HTTPError):
+ obj = SessionAPI(module_params)
+ obj.invoke_request(TEST_PATH, "GET")
+
+ @pytest.mark.parametrize("query_params", [
+ {"inp": {"$filter": "UserName eq 'admin'"},
+ "out": "%24filter=UserName+eq+%27admin%27"},
+ {"inp": {"$top": 1, "$skip": 2, "$filter": "JobType/Id eq 8"}, "out":
+ "%24top=1&%24skip=2&%24filter=JobType%2FId+eq+8"},
+ {"inp": {"$top": 1, "$skip": 3}, "out": "%24top=1&%24skip=3"}
+ ])
+ def test_build_url(self, query_params, mocker, session_utils_object):
+ """
+ builds complete url
+ """
+ base_uri = 'https://xxx.xxx.x.x:443/api'
+ path = "/AccountService/Accounts"
+ mocker.patch(MODULE_UTIL_PATH + 'session_utils.SessionAPI._get_url',
+ return_value=base_uri + path)
+ inp = query_params["inp"]
+ out = query_params["out"]
+ url = session_utils_object._build_url(
+ path, query_param=inp)
+ assert url == base_uri + path + "?" + out
+
+ def test_build_url_none(self, mocker, session_utils_object):
+ """
+ builds complete url
+ """
+ base_uri = 'https://xxx.xxx.x.x:443/api'
+ mocker.patch(MODULE_UTIL_PATH + 'redfish.Redfish._get_base_url',
+ return_value=base_uri)
+ url = session_utils_object._build_url("", None)
+ assert url == ""
+
+ def test_invalid_json_openurlresp(self):
+ """
+ Test the behavior when an invalid JSON string is passed to the `OpenURLResponse` object.
+
+ This test case creates an instance of the `OpenURLResponse` class with an empty dictionary
+ as the initial data.
+ Then, it sets the `body` attribute of the object to an invalid JSON string.
+ Finally, it asserts that calling the `json_data` attribute raises a `ValueError` with the
+ message "Unable to parse json".
+
+ Parameters:
+ self (TestCase): The current test case instance.
+
+ Returns:
+ None
+ """
+ obj = OpenURLResponse({})
+ obj.body = 'invalid json'
+ with pytest.raises(ValueError) as e:
+ obj.json_data
+ assert e.value.args[0] == "Unable to parse json"
+
+ def test_reason(self):
+ """
+ Test the `reason` property of the `OpenURLResponse` class.
+
+ This test case mocks the `read` method of the `obj` object to return an empty JSON string.
+ It then creates an instance of the `OpenURLResponse` class with the mocked `obj` object.
+ The `reason` property of the `OpenURLResponse` instance is then accessed and stored in the
+ `reason_ret` variable. Finally, the test asserts that the value of `reason_ret` is equal to
+ the expected value of "returning reason".
+
+ Parameters:
+ self (TestCase): The test case object.
+
+ Returns:
+ None
+ """
+ def mock_read():
+ return "{}"
+ obj = MagicMock()
+ obj.reason = "returning reason"
+ obj.read = mock_read
+ ourl = OpenURLResponse(obj)
+ reason_ret = ourl.reason
+ assert reason_ret == "returning reason"
+
+ def test_requests_ca_bundle_set(self, mocker, mock_response, session_utils_object):
+ """
+ Test if the `REQUESTS_CA_BUNDLE` environment variable is set correctly.
+
+ This function tests if the `REQUESTS_CA_BUNDLE` environment variable is set to the expected
+ value. It does this by setting the environment variable to a specific path, patching the
+ `invoke_request` method of the `session_utils_object` to return a mock response, and then
+ calling the `_get_omam_ca_env` method of the `session_utils_object`. Finally, it asserts
+ that the result of the `_get_omam_ca_env` method is equal to the expected path.
+
+ Parameters:
+ - mocker (MockerFixture): A fixture provided by the pytest library used to patch the
+ `invoke_request` method.
+ - mock_response (Mock): A mock object representing the response returned by the
+ `invoke_request` method.
+ - session_utils_object (SessionUtils): An instance of the `SessionUtils` class.
+
+ Returns:
+ None
+ """
+ os.environ["REQUESTS_CA_BUNDLE"] = "/path/to/requests_ca_bundle.pem"
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ result = session_utils_object._get_omam_ca_env()
+ assert result == "/path/to/requests_ca_bundle.pem"
+ del os.environ["REQUESTS_CA_BUNDLE"]
+
+ def test_curl_ca_bundle_set(self, mocker, mock_response, session_utils_object):
+ """
+ Test the functionality of the `curl_ca_bundle_set` method.
+
+ This test case verifies that the `curl_ca_bundle_set` method correctly sets the
+ `CURL_CA_BUNDLE` environment variable and retrieves the value using the `_get_omam_ca_env`
+ method.
+
+ Parameters:
+ - mocker (MockerFixture): A fixture provided by the pytest-mock library used to patch
+ the `invoke_request` method.
+ - mock_response (MagicMock): A mock object representing the response returned by the
+ `invoke_request` method.
+ - session_utils_object (SessionUtils): An instance of the `SessionUtils` class.
+
+ Returns:
+ None
+
+ Raises:
+ AssertionError: If the retrieved value from `_get_omam_ca_env` does not match the
+ expected value.
+
+ Note:
+ - The test case sets the `CURL_CA_BUNDLE` environment variable to
+ "/path/to/curl_ca_bundle.pem" before executing the test.
+ - The test case deletes the `CURL_CA_BUNDLE` environment variable after the test is
+ completed.
+ """
+ os.environ["CURL_CA_BUNDLE"] = "/path/to/curl_ca_bundle.pem"
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ result = session_utils_object._get_omam_ca_env()
+ assert result == "/path/to/curl_ca_bundle.pem"
+ del os.environ["CURL_CA_BUNDLE"]
+
+ def test_omam_ca_bundle_set(self, mocker, mock_response, session_utils_object):
+ """
+ Test the functionality of the `_get_omam_ca_env` method in the `SessionUtils` class.
+
+ This test case verifies that the `_get_omam_ca_env` method correctly retrieves the value of
+ the `OMAM_CA_BUNDLE` environment variable and returns it.
+
+ Parameters:
+ - mocker (MockerFixture): A fixture provided by the pytest library used for mocking
+ objects.
+ - mock_response (MagicMock): A mock object representing the response returned by the
+ `invoke_request` method.
+ - session_utils_object (SessionUtils): An instance of the `SessionUtils` class.
+
+ Returns:
+ None
+
+ Raises:
+ AssertionError: If the returned value from `_get_omam_ca_env` does not match the
+ expected value.
+
+ Side Effects:
+ - Sets the value of the `OMAM_CA_BUNDLE` environment variable to
+ "/path/to/omam_ca_bundle.pem".
+ - Deletes the `OMAM_CA_BUNDLE` environment variable after the test case is complete.
+ """
+ os.environ["OMAM_CA_BUNDLE"] = "/path/to/omam_ca_bundle.pem"
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ result = session_utils_object._get_omam_ca_env()
+ assert result == "/path/to/omam_ca_bundle.pem"
+ del os.environ["OMAM_CA_BUNDLE"]
+
+ def test_no_env_variable_set(self, mocker, mock_response, session_utils_object):
+ """
+ Test the case when no environment variable is set.
+
+ Args:
+ mocker (MockerFixture): The mocker fixture used to mock functions and objects.
+ mock_response (MagicMock): The mock response object used to simulate API responses.
+ session_utils_object (SessionUtils): The SessionUtils object under test.
+
+ Returns:
+ None
+
+ Asserts:
+ - The result of the _get_omam_ca_env() method is None.
+ """
+ mocker.patch(MODULE_UTIL_PATH + INVOKE_REQUEST,
+ return_value=mock_response)
+ result = session_utils_object._get_omam_ca_env()
+ assert result is None
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_diagnostics.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_diagnostics.py
new file mode 100644
index 000000000..987ff83d2
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_diagnostics.py
@@ -0,0 +1,1057 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 9.0.0
+# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import absolute_import, division, print_function
+
+from io import StringIO
+import json
+import tempfile
+
+import pytest
+from urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils._text import to_text
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_diagnostics
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock
+from ansible_collections.dellemc.openmanage.plugins.modules.idrac_diagnostics import main
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.idrac_diagnostics.'
+MODULE_UTILS_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.utils.'
+
+SUCCESS_EXPORT_MSG = "Successfully exported the diagnostics."
+FAILURE_EXPORT_MSG = "Unable to copy the ePSA Diagnostics results file to the network share."
+SUCCESS_RUN_MSG = "Successfully ran the diagnostics operation."
+SUCCESS_RUN_AND_EXPORT_MSG = "Successfully ran and exported the diagnostics."
+RUNNING_RUN_MSG = "Successfully triggered the job to run diagnostics."
+ALREADY_RUN_MSG = "The diagnostics job is already present."
+INVALID_DIRECTORY_MSG = "Provided directory path '{path}' is not valid."
+NO_OPERATION_SKIP_MSG = "The operation is skipped."
+INSUFFICIENT_DIRECTORY_PERMISSION_MSG = "Provided directory path '{path}' is not writable. " \
+ "Please check if the directory has appropriate permissions"
+UNSUPPORTED_FIRMWARE_MSG = "iDRAC firmware version is not supported."
+TIMEOUT_NEGATIVE_OR_ZERO_MSG = "The parameter `job_wait_timeout` value cannot be negative or zero."
+WAIT_TIMEOUT_MSG = "The job is not complete after {0} seconds."
+START_TIME = "The specified scheduled time occurs in the past, " \
+ "provide a future time to schedule the job."
+INVALID_TIME = "The specified date and time `{0}` to schedule the diagnostics is not valid. Enter a valid date and time."
+END_START_TIME = "The end time `{0}` to schedule the diagnostics must be greater than the start time `{1}`."
+CHANGES_FOUND_MSG = "Changes found to be applied."
+NO_FILE = "The diagnostics file does not exist."
+
+PROXY_SERVER = "proxy.example.com"
+PAYLOAD_FUNC = "Diagnostics.get_payload_details"
+VALIDATE_TIME_FUNC = "RunDiagnostics._RunDiagnostics__validate_time"
+EXPORT_FUNC = "ExportDiagnostics._ExportDiagnostics__export_diagnostics"
+RUN_EXEC_FUNC = "RunDiagnostics.execute"
+MESSAGE_EXTENDED = "@Message.ExtendedInfo"
+DIAGS_ODATA = "/DiagnosticsService"
+REDFISH = "/redfish/v1"
+REDFISH_DIAGNOSTICS_URL = "/redfish/v1/diagnostics"
+REDFISH_BASE_API = '/redfish/v1/api'
+MANAGER_URI_ONE = "/redfish/v1/managers/1"
+API_ONE = "/local/action"
+EXPORT_URL_MOCK = '/redfish/v1/export_diagnostics'
+RUN_URL_MOCK = '/redfish/v1/import_diagnostics'
+API_INVOKE_MOCKER = "iDRACRedfishAPI.invoke_request"
+ODATA = "@odata.id"
+DIAGS_FILE_NAME = 'test_diagnostics.txt'
+SHARE_NAME = tempfile.gettempdir()
+IP = "X.X.X.X"
+HTTPS_PATH = "https://testhost.com"
+HTTP_ERROR = "http error message"
+APPLICATION_JSON = "application/json"
+
+
+class TestDiagnostics(FakeAnsibleModule):
+ module = idrac_diagnostics
+
+ @pytest.fixture
+ def idrac_diagnostics_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_diagnostics_mock(self, mocker, idrac_diagnostics_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
+ return_value=idrac_diagnostics_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_diagnostics_mock
+ return idrac_conn_mock
+
+ def test_execute(self, idrac_default_args, idrac_connection_diagnostics_mock):
+ obj = MagicMock()
+ diagnostics_obj = self.module.Diagnostics(idrac_connection_diagnostics_mock, obj)
+ diagnostics_obj.execute()
+
+ def test_get_payload_details(self, idrac_connection_diagnostics_mock):
+ obj = MagicMock()
+ diags_obj = self.module.Diagnostics(idrac_connection_diagnostics_mock, obj)
+ # Scenario 1: With all values
+ obj.params.get.return_value = {
+ 'ip_address': IP,
+ 'share_name': 'my_share',
+ 'username': 'my_user',
+ 'password': 'my_password',
+ 'file_name': DIAGS_FILE_NAME,
+ 'share_type': 'http',
+ 'ignore_certificate_warning': 'on',
+ 'proxy_support': 'parameters_proxy',
+ 'proxy_type': 'socks',
+ 'proxy_server': PROXY_SERVER,
+ 'proxy_port': 8080,
+ 'proxy_username': 'my_username',
+ 'proxy_password': 'my_password'
+ }
+ result = diags_obj.get_payload_details()
+ expected_result = {
+ 'IPAddress': IP,
+ 'ShareName': 'my_share',
+ 'UserName': 'my_user',
+ 'Password': 'my_password',
+ 'FileName': DIAGS_FILE_NAME,
+ 'ShareType': 'HTTP',
+ 'IgnoreCertWarning': 'On',
+ 'ProxySupport': 'ParametersProxy',
+ 'ProxyType': 'SOCKS',
+ 'ProxyServer': PROXY_SERVER,
+ 'ProxyPort': '8080',
+ 'ProxyUname': 'my_username',
+ 'ProxyPasswd': 'my_password'
+ }
+ assert result == expected_result
+
+ # Scenario 2: With no proxy values
+ obj.params.get.return_value = {
+ 'ip_address': IP,
+ 'share_name': 'my_share',
+ 'username': 'my_user',
+ 'password': 'my_password',
+ 'file_name': DIAGS_FILE_NAME,
+ 'share_type': 'http',
+ 'ignore_certificate_warning': 'on'
+ }
+ result = diags_obj.get_payload_details()
+ expected_result = {
+ 'IPAddress': IP,
+ 'ShareName': 'my_share',
+ 'UserName': 'my_user',
+ 'Password': 'my_password',
+ 'FileName': DIAGS_FILE_NAME,
+ 'ShareType': 'HTTP',
+ 'IgnoreCertWarning': 'On'
+ }
+ assert result == expected_result
+
+ # Scenario 3: With no proxy username and password values
+ obj.params.get.return_value = {
+ 'ip_address': IP,
+ 'share_name': 'my_share',
+ 'username': 'my_user',
+ 'password': 'my_password',
+ 'file_name': DIAGS_FILE_NAME,
+ 'share_type': 'http',
+ 'ignore_certificate_warning': 'on',
+ 'proxy_support': 'parameters_proxy',
+ 'proxy_type': 'socks',
+ 'proxy_server': PROXY_SERVER,
+ 'proxy_port': 8080
+ }
+ result = diags_obj.get_payload_details()
+ expected_result = {
+ 'IPAddress': IP,
+ 'ShareName': 'my_share',
+ 'UserName': 'my_user',
+ 'Password': 'my_password',
+ 'FileName': DIAGS_FILE_NAME,
+ 'ShareType': 'HTTP',
+ 'IgnoreCertWarning': 'On',
+ 'ProxySupport': 'ParametersProxy',
+ 'ProxyType': 'SOCKS',
+ 'ProxyServer': PROXY_SERVER,
+ 'ProxyPort': '8080'
+ }
+ assert result == expected_result
+
    def test_network_share(self, idrac_connection_diagnostics_mock, idrac_default_args, mocker):
        """Exercise Diagnostics.test_network_share for LOCAL/HTTP shares and the HTTP-error path."""
        # Scenario 1: ShareType is LOCAL and directory is invalid
        payload = {"FileName": DIAGS_FILE_NAME, "ShareType": "LOCAL", "ShareName": "my_share"}
        mocker.patch(MODULE_PATH + PAYLOAD_FUNC, return_value=payload)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        diagnostics_obj = self.module.Diagnostics(idrac_connection_diagnostics_mock, f_module)
        with pytest.raises(Exception) as exc:
            diagnostics_obj.test_network_share()
        assert exc.value.args[0] == INVALID_DIRECTORY_MSG.format(path="my_share")

        # Scenario 2: ShareType is HTTP with a valid share name, so no exception is raised.
        # NOTE(review): the original comment said "LOCAL and directory is not writable",
        # but the payload uses ShareType "HTTP" — confirm which scenario was intended.
        payload = {"FileName": DIAGS_FILE_NAME, "ShareType": "HTTP", "ShareName": SHARE_NAME}
        mocker.patch(MODULE_PATH + PAYLOAD_FUNC, return_value=payload)
        mocker.patch(MODULE_PATH + "Diagnostics.get_test_network_share_url", return_value=API_ONE)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        diagnostics_obj = self.module.Diagnostics(idrac_connection_diagnostics_mock, f_module)
        ob = diagnostics_obj.test_network_share()
        assert ob is None

        # Scenario 3: ShareType is not LOCAL
        obj = MagicMock()
        payload = {"FileName": DIAGS_FILE_NAME, "ShareType": "HTTP", "ShareName": "my_share"}
        mocker.patch(MODULE_PATH + PAYLOAD_FUNC, return_value=payload)
        mocker.patch(MODULE_PATH + "Diagnostics.get_test_network_share_url", return_value=API_ONE)
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER, return_value=obj)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        diagnostics_obj = self.module.Diagnostics(idrac_connection_diagnostics_mock, f_module)
        diagnostics_obj.test_network_share()

        # Scenario 4: HTTP Error — invoke_request raises HTTPError; the module should
        # surface the embedded extended-info "Message" as the failure text.
        payload = {"FileName": DIAGS_FILE_NAME, "ShareType": "HTTP", "ShareName": "my_share"}
        mocker.patch(MODULE_PATH + PAYLOAD_FUNC, return_value=payload)
        json_str = to_text(json.dumps({"error": {MESSAGE_EXTENDED: [
            {
                'MessageId': "123",
                "Message": "Error"
            }
        ]}}))
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
                     side_effect=HTTPError(HTTPS_PATH, 400,
                                           HTTP_ERROR,
                                           {"accept-type": APPLICATION_JSON},
                                           StringIO(json_str)))
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        diagnostics_obj = self.module.Diagnostics(idrac_connection_diagnostics_mock, f_module)
        with pytest.raises(Exception) as exc:
            diagnostics_obj.test_network_share()
        assert exc.value.args[0] == 'Error'
+
    def test_get_test_network_share_url(self, idrac_connection_diagnostics_mock, idrac_default_args, mocker):
        """get_test_network_share_url returns the TestNetworkShare action target, or fails on a resource error."""
        # Scenario 1: resource id resolves cleanly; the action target URI is returned.
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
                     return_value=(REDFISH, None))
        mocker.patch(MODULE_PATH + "get_dynamic_uri",
                     return_value={"Links": {"Oem": {"Dell": {"DellLCService": {ODATA: DIAGS_ODATA}}}},
                                   "Actions": {"#DellLCService.TestNetworkShare": {"target": API_ONE}}})

        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        obj = self.module.Diagnostics(idrac_connection_diagnostics_mock, f_module)
        resp = obj.get_test_network_share_url()
        assert resp == API_ONE

        # Scenario 2: for error message — resource lookup reports an error string,
        # which the method must raise as the failure message.
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
                     return_value=(REDFISH, "Error"))
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        obj = self.module.Diagnostics(idrac_connection_diagnostics_mock, f_module)
        with pytest.raises(Exception) as exc:
            obj.get_test_network_share_url()
        assert exc.value.args[0] == "Error"
+
+
class TestRunDiagnostics(FakeAnsibleModule):
    """Unit tests for the RunDiagnostics class of the idrac_diagnostics module."""

    module = idrac_diagnostics

    @pytest.fixture
    def idrac_diagnostics_mock(self):
        # Bare MagicMock standing in for the iDRAC connection handle.
        idrac_obj = MagicMock()
        return idrac_obj

    @pytest.fixture
    def idrac_connection_diagnostics_mock(self, mocker, idrac_diagnostics_mock):
        # Patch iDRACRedfishAPI so the code under test receives the mock above,
        # both when called directly and when entered as a context manager.
        idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
                                       return_value=idrac_diagnostics_mock)
        idrac_conn_mock.return_value.__enter__.return_value = idrac_diagnostics_mock
        return idrac_conn_mock

    def test_execute(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """execute() returns the success message for a completed job and the running message otherwise."""
        obj = MagicMock()
        obj.status_code = 200
        # Scenario 1: JobState is completed
        job = {"JobState": "Completed"}
        mocker.patch(MODULE_PATH + "Diagnostics.test_network_share", return_value=None)
        mocker.patch(MODULE_PATH + "RunDiagnostics._RunDiagnostics__get_run_diagnostics_url", return_value=None)
        mocker.patch(MODULE_PATH + "RunDiagnostics.check_diagnostics_jobs", return_value=None)
        mocker.patch(MODULE_PATH + "RunDiagnostics._RunDiagnostics__run_diagnostics", return_value=obj)
        mocker.patch(MODULE_PATH + "RunDiagnostics._RunDiagnostics__perform_job_wait", return_value=job)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        msg, job_status, file_path = run_diagnostics_obj.execute()
        assert msg == SUCCESS_RUN_MSG
        assert job_status == job
        assert file_path is None

        # Scenario 2: JobState is scheduled (job not finished yet -> running message)
        job = {"JobState": "Scheduled"}
        idrac_default_args.update({'export': True})
        mocker.patch(MODULE_PATH + "RunDiagnostics._RunDiagnostics__perform_job_wait", return_value=job)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        msg, job_status, file_path = run_diagnostics_obj.execute()
        assert msg == RUNNING_RUN_MSG
        assert job_status == job
        assert file_path is None

    def test_run_diagnostics(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """__run_diagnostics builds the payload for every start/end-time combination and invokes the API."""
        obj = MagicMock()
        obj.status_code = 200
        mocker.patch(MODULE_PATH + "RunDiagnostics._RunDiagnostics__get_run_diagnostics_url", return_value=API_ONE)
        mocker.patch(MODULE_PATH + "RunDiagnostics._RunDiagnostics__validate_time_format", return_value=True)
        mocker.patch(MODULE_PATH + VALIDATE_TIME_FUNC, return_value=True)
        mocker.patch(MODULE_PATH + "RunDiagnostics._RunDiagnostics__validate_end_time", return_value=True)
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER, return_value=obj)

        # Scenario 1: With start and end time
        run_params = {
            'run_mode': 'express',
            'reboot_type': 'power_cycle',
            'scheduled_start_time': '20240715235959',
            'scheduled_end_time': '20250715235959'
        }
        idrac_default_args.update(run_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        status = run_diagnostics_obj._RunDiagnostics__run_diagnostics()
        assert status == obj

        # Scenario 2: Without time
        run_params = {
            'run_mode': 'express',
            'reboot_type': 'force'
        }
        idrac_default_args.update(run_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        status = run_diagnostics_obj._RunDiagnostics__run_diagnostics()
        assert status == obj

        # Scenario 3: With start and end time as empty
        run_params = {
            'run_mode': 'express',
            'reboot_type': 'power_cycle',
            'scheduled_start_time': '',
            'scheduled_end_time': ''
        }
        idrac_default_args.update(run_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        status = run_diagnostics_obj._RunDiagnostics__run_diagnostics()
        assert status == obj

        # Scenario 4: With start time only; __validate_time now reports invalid (False)
        run_params = {
            'run_mode': 'express',
            'reboot_type': 'power_cycle',
            'scheduled_start_time': '20200715235959'
        }
        mocker.patch(MODULE_PATH + VALIDATE_TIME_FUNC, return_value=False)
        idrac_default_args.update(run_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        status = run_diagnostics_obj._RunDiagnostics__run_diagnostics()
        assert status == obj

        # Scenario 5: With end time only; __validate_time still reports invalid
        run_params = {
            'run_mode': 'express',
            'reboot_type': 'power_cycle',
            'scheduled_end_time': '20200715235959'
        }
        mocker.patch(MODULE_PATH + VALIDATE_TIME_FUNC, return_value=False)
        idrac_default_args.update(run_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        status = run_diagnostics_obj._RunDiagnostics__run_diagnostics()
        assert status == obj

    def test_get_run_diagnostics_url(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """__get_run_diagnostics_url stores the RunePSADiagnostics action target or fails appropriately."""
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
                     return_value=(REDFISH, None))
        # Scenario 1: With url
        mocker.patch(MODULE_PATH + "get_dynamic_uri",
                     return_value={"Links": {"Oem": {"Dell": {"DellLCService": {ODATA: DIAGS_ODATA}}}},
                                   "Actions": {"#DellLCService.RunePSADiagnostics": {"target": API_ONE}}})
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        run_diagnostics_obj._RunDiagnostics__get_run_diagnostics_url()
        assert run_diagnostics_obj.run_url == API_ONE

        # Scenario 2: When url is empty for Links -> unsupported firmware error
        mocker.patch(MODULE_PATH + "get_dynamic_uri",
                     return_value={"Links": {}})
        with pytest.raises(Exception) as exc:
            run_diagnostics_obj._RunDiagnostics__get_run_diagnostics_url()
        assert exc.value.args[0] == UNSUPPORTED_FIRMWARE_MSG

        # Scenario 3: For error message from the resource-id lookup
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
                     return_value=(REDFISH, "error"))
        with pytest.raises(Exception) as exc:
            run_diagnostics_obj._RunDiagnostics__get_run_diagnostics_url()
        assert exc.value.args[0] == "error"

    def test_check_diagnostics_jobs(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """check_diagnostics_jobs reacts to an existing RemoteDiagnostics job in check and normal mode."""
        obj = MagicMock()
        temp_list = {"Members": [{"Id": "JID_123", "JobType": "RemoteDiagnostics", "JobState": "New"}]}
        obj.json_data = temp_list
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
                     return_value=(REDFISH, None))
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
                     return_value=obj)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)

        # Scenario 1: Check mode with job id -> a diagnostics job already exists
        with pytest.raises(Exception) as exc:
            run_diagnostics_obj.check_diagnostics_jobs()
        assert exc.value.args[0] == ALREADY_RUN_MSG

        # Scenario 2: Check mode without job id -> changes would be made
        temp_list = {"Members": [{"Id": "", "JobType": "Test", "JobState": "New"}]}
        obj.json_data = temp_list
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
                     return_value=obj)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        with pytest.raises(Exception) as exc:
            run_diagnostics_obj.check_diagnostics_jobs()
        assert exc.value.args[0] == CHANGES_FOUND_MSG

        # Scenario 3: Normal mode with job id -> refuse to start a second job
        temp_list = {"Members": [{"Id": "666", "JobType": "RemoteDiagnostics", "JobState": "New"}]}
        obj.json_data = temp_list
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
                     return_value=obj)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        with pytest.raises(Exception) as exc:
            run_diagnostics_obj.check_diagnostics_jobs()
        assert exc.value.args[0] == ALREADY_RUN_MSG

        # Scenario 4: Normal mode without job id -> nothing to report
        temp_list = {"Members": [{"Id": "", "JobType": "RemoteDiagnostics", "JobState": "New"}]}
        obj.json_data = temp_list
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
                     return_value=obj)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        resp = run_diagnostics_obj.check_diagnostics_jobs()
        assert resp is None

    def test_validate_job_timeout(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """__validate_job_timeout rejects non-positive timeouts and accepts positive ones."""
        # Scenario 1: Negative timeout
        idrac_default_args.update({'job_wait': True, 'job_wait_timeout': -120})
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        with pytest.raises(Exception) as exc:
            run_diagnostics_obj._RunDiagnostics__validate_job_timeout()
        assert exc.value.args[0] == TIMEOUT_NEGATIVE_OR_ZERO_MSG

        # Scenario 2: Valid timeout
        idrac_default_args.update({'job_wait': True, 'job_wait_timeout': 120})
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        resp = run_diagnostics_obj._RunDiagnostics__validate_job_timeout()
        assert resp is None

    def test_validate_time_format(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """__validate_time_format normalizes ISO-with-offset input to YYYYmmddHHMMSS and rejects bad input."""
        idrac_default_args.update({'time': "20250715235959"})
        # Scenario 1: Time with offset is converted to the compact 14-digit form
        time = "2024-09-14T05:59:35-05:00"
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        formatted_time = run_diagnostics_obj._RunDiagnostics__validate_time_format(time)
        assert formatted_time == "20240914055935"

        # Scenario 2: Time without offset passes through unchanged
        time = "20250715235959"
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        formatted_time = run_diagnostics_obj._RunDiagnostics__validate_time_format(time)
        assert formatted_time == "20250715235959"

        # Scenario 3: Invalid time raises with the INVALID_TIME message
        time = "2025"
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        with pytest.raises(Exception) as exc:
            run_diagnostics_obj._RunDiagnostics__validate_time_format(time)
        assert exc.value.args[0] == INVALID_TIME.format(time)

    def test_validate_time(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """__validate_time accepts times after the (mocked) current iDRAC time and rejects past ones."""
        resp = ("2024-09-14T05:59:35-05:00", "-05:00")
        mocker.patch(MODULE_PATH + "get_current_time", return_value=resp)

        # Scenario 1: Future time
        idrac_default_args.update({'time': "20250715235959"})
        time = idrac_default_args['time']
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        assert run_diagnostics_obj._RunDiagnostics__validate_time(time) is True

        # Scenario 2: Past time
        idrac_default_args.update({'time': "20230715235959"})
        time = idrac_default_args['time']
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        with pytest.raises(Exception) as exc:
            run_diagnostics_obj._RunDiagnostics__validate_time(time)
        assert exc.value.args[0] == START_TIME

    def test_validate_end_time(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """__validate_end_time requires the end time to be after the start time."""
        # Scenario 1: start_time less than end_time
        start_time = "20230715235959"
        end_time = "20240715235959"
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        assert run_diagnostics_obj._RunDiagnostics__validate_end_time(start_time, end_time) is True

        # Scenario 2: start_time greater than end_time
        start_time = "20250715235959"
        end_time = "20240715235959"
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        with pytest.raises(Exception) as exc:
            run_diagnostics_obj._RunDiagnostics__validate_end_time(start_time, end_time)
        assert exc.value.args[0] == END_START_TIME.format(end_time, start_time)

    def test_perform_job_wait(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """__perform_job_wait covers completion, timeout, failure, no-wait and missing-URI paths."""
        # Scenario 1: When JobState is completed
        obj = MagicMock()
        obj.headers = {'Location': REDFISH_BASE_API}
        obj.json_data = {'JobState': 'Completed'}
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
                     return_value=(REDFISH, None))
        mocker.patch(MODULE_PATH + "idrac_redfish_job_tracking",
                     return_value=(False, 'msg', obj.json_data, 120))
        idrac_default_args.update({'job_wait': True, 'job_wait_timeout': 1200})
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        job_dict = run_diagnostics_obj._RunDiagnostics__perform_job_wait(obj)
        assert job_dict == obj.json_data

        # Scenario 2: When wait time is less than the tracking time -> timeout error
        obj = MagicMock()
        obj.headers = {'Location': REDFISH_BASE_API}
        obj.json_data = {'JobState': 'Scheduled'}
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
                     return_value=(REDFISH, None))
        mocker.patch(MODULE_PATH + "idrac_redfish_job_tracking",
                     return_value=(False, 'msg', obj.json_data, 120))
        idrac_default_args.update({'job_wait': True, 'job_wait_timeout': 10})
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        with pytest.raises(Exception) as exc:
            run_diagnostics_obj._RunDiagnostics__perform_job_wait(obj)
        assert exc.value.args[0] == WAIT_TIMEOUT_MSG.format(10)

        # Scenario 3: When JobState is Failed -> the job's Message becomes the failure text
        obj = MagicMock()
        obj.headers = {'Location': REDFISH_BASE_API}
        obj.json_data = {'JobState': 'Failed', 'Message': 'Job Failed'}
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
                     return_value=(REDFISH, None))
        mocker.patch(MODULE_PATH + "idrac_redfish_job_tracking",
                     return_value=(True, 'msg', obj.json_data, 120))
        idrac_default_args.update({'job_wait': True, 'job_wait_timeout': 1200})
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        with pytest.raises(Exception) as exc:
            run_diagnostics_obj._RunDiagnostics__perform_job_wait(obj)
        assert exc.value.args[0] == 'Job Failed'

        # Scenario 4: When job_wait is False -> current job data returned without tracking
        obj = MagicMock()
        obj.headers = {'Location': REDFISH_BASE_API}
        obj.json_data = {'JobState': 'Scheduled'}
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
                     return_value=(REDFISH, None))
        mocker.patch(MODULE_PATH + "idrac_redfish_job_tracking",
                     return_value=(True, 'msg', obj.json_data, 120))
        idrac_default_args.update({'job_wait': False, 'job_wait_timeout': 1200})
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER, return_value=obj)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        job_dict = run_diagnostics_obj._RunDiagnostics__perform_job_wait(obj)
        assert job_dict == obj.json_data

        # Scenario 5: When there's no job uri in the Location header -> empty dict
        obj = MagicMock()
        obj.headers = {'Location': ''}
        idrac_default_args.update({'job_wait': False, 'job_wait_timeout': 1200})
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER, return_value=obj)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_diagnostics_obj = self.module.RunDiagnostics(idrac_connection_diagnostics_mock, f_module)
        job_dict = run_diagnostics_obj._RunDiagnostics__perform_job_wait(obj)
        assert job_dict == {}
+
+
class TestExportDiagnostics(FakeAnsibleModule):
    """Unit tests for the ExportDiagnostics class of the idrac_diagnostics module."""

    module = idrac_diagnostics

    @pytest.fixture
    def idrac_diagnostics_mock(self):
        # Bare MagicMock standing in for the iDRAC connection handle.
        idrac_obj = MagicMock()
        return idrac_obj

    @pytest.fixture
    def idrac_connection_diagnostics_mock(self, mocker, idrac_diagnostics_mock):
        # Patch iDRACRedfishAPI so the code under test receives the mock above,
        # both when called directly and when entered as a context manager.
        idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
                                       return_value=idrac_diagnostics_mock)
        idrac_conn_mock.return_value.__enter__.return_value = idrac_diagnostics_mock
        return idrac_conn_mock

    def test_execute(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """execute() handles local and nfs share exports plus check mode."""
        obj = MagicMock()
        obj.headers = {"Location": REDFISH}
        obj.status_code = 200
        obj.share_name = SHARE_NAME
        obj.file_name = DIAGS_FILE_NAME
        mocker.patch(MODULE_PATH + "Diagnostics.test_network_share", return_value=None)
        mocker.patch(MODULE_PATH + "ExportDiagnostics._ExportDiagnostics__get_export_diagnostics_url", return_value=None)
        mocker.patch(MODULE_PATH + "ExportDiagnostics._ExportDiagnostics__export_diagnostics_local", return_value=obj)

        # Scenario 1: share_type = local -> no job status, file path is share/file
        export_params = {'share_parameters': {'share_type': "local"}}
        idrac_default_args.update(export_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        msg, job_status, file_path = export_diagnostics_obj.execute()
        assert msg == SUCCESS_EXPORT_MSG
        assert job_status == {}
        assert file_path == 'None/None'

        # Scenario 2: share_type = nfs -> job status comes from get_job_status
        job = {"JobState": "Completed"}
        export_params = {'share_parameters': {'share_type': "nfs"}}
        mocker.patch(MODULE_PATH + "ExportDiagnostics._ExportDiagnostics__export_diagnostics_nfs", return_value=obj)
        mocker.patch(MODULE_PATH + "ExportDiagnostics.get_job_status", return_value=job)
        idrac_default_args.update(export_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        msg, job_status, file_path = export_diagnostics_obj.execute()
        assert msg == SUCCESS_EXPORT_MSG
        assert job_status == job
        assert file_path == 'None/None'

        # Scenario 3: Check mode delegates to perform_check_mode
        obj.status = 400
        mocker.patch(MODULE_PATH + "ExportDiagnostics.perform_check_mode", return_value=obj)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        export_diagnostics_obj.execute()

    def test_export_diagnostics_local(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """__export_diagnostics_local raises while handling the local-share download path."""
        export_params = {
            'share_parameters': {
                'share_name': SHARE_NAME,
                'file_name': DIAGS_FILE_NAME
            }
        }
        obj = MagicMock()
        obj.status = 200
        obj.headers = {'Location': REDFISH_BASE_API}
        obj.filename = DIAGS_FILE_NAME
        mocker.patch(MODULE_PATH + 'ExportDiagnostics._ExportDiagnostics__export_diagnostics', return_value=obj)
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER, return_value=obj)
        idrac_default_args.update(export_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        # Only asserts that an exception is raised; the exact failure message is not pinned.
        with pytest.raises(Exception):
            export_diagnostics_obj._ExportDiagnostics__export_diagnostics_local()

    def test_export_diagnostics_http(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """__export_diagnostics_http works with both IPv4 and IPv6 addresses."""
        obj = MagicMock()
        obj.status_code = 200
        mocker.patch(MODULE_PATH + PAYLOAD_FUNC, return_value=None)
        mocker.patch(MODULE_PATH + EXPORT_FUNC, return_value=obj)
        # Scenario 1: With ipv4
        export_params = {
            'share_parameters': {
                'ip_address': IP,
                'file_name': 'test_diags',
                'share_type': 'http',
                'share_name': 'myshare'
            }
        }
        idrac_default_args.update(export_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        result = export_diagnostics_obj._ExportDiagnostics__export_diagnostics_http()
        assert result == obj

        # Scenario 2: With ipv6
        export_params = {
            'share_parameters': {
                'ip_address': 'XXXX:XXXX:XXXX:XXXX:XXXX:XXXX:XXXX:XXXX',
                'file_name': 'test_diags',
                'share_type': 'http',
                'share_name': 'myshare'
            }
        }
        idrac_default_args.update(export_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        result = export_diagnostics_obj._ExportDiagnostics__export_diagnostics_http()
        assert result == obj

    def test_export_diagnostics_cifs(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """__export_diagnostics_cifs works with and without a workgroup."""
        obj = MagicMock()
        obj.status_code = 200
        mocker.patch(MODULE_PATH + PAYLOAD_FUNC, return_value={})
        mocker.patch(MODULE_PATH + EXPORT_FUNC, return_value=obj)
        # Scenario 1: With workgroup
        export_params = {
            'share_parameters': {
                'file_name': 'test_diags',
                'share_type': 'cifs',
                'share_name': 'myshare',
                'ignore_certificate_warning': 'off',
                'workgroup': 'myworkgroup'
            }
        }
        idrac_default_args.update(export_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        result = export_diagnostics_obj._ExportDiagnostics__export_diagnostics_cifs()
        assert result == obj

        # Scenario 2: Without workgroup
        export_params = {
            'share_parameters': {
                'file_name': 'test_diags',
                'share_type': 'cifs',
                'share_name': 'myshare',
                'ignore_certificate_warning': 'off'
            }
        }
        idrac_default_args.update(export_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        result = export_diagnostics_obj._ExportDiagnostics__export_diagnostics_cifs()
        assert result == obj

    def test_export_diagnostics_nfs(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """__export_diagnostics_nfs exports via an NFS share (credentials stripped from the payload)."""
        obj = MagicMock()
        obj.status_code = 200
        mocker.patch(MODULE_PATH + PAYLOAD_FUNC, return_value={"UserName": "user", "Password": "password"})
        mocker.patch(MODULE_PATH + EXPORT_FUNC, return_value=obj)
        export_params = {
            'share_parameters': {
                'share_name': 'share',
                'share_type': 'nfs',
                'ignore_certificate_warning': 'off'
            }
        }
        idrac_default_args.update(export_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        result = export_diagnostics_obj._ExportDiagnostics__export_diagnostics_nfs()
        assert result == obj

    def test_get_export_diagnostics_url(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """__get_export_diagnostics_url stores the ExportePSADiagnosticsResult action target or fails."""
        export_params = {
            'share_parameters': {
                'file_name': DIAGS_FILE_NAME,
                'share_type': 'local',
                'ignore_certificate_warning': 'off'
            }
        }
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
                     return_value=(REDFISH, None))
        # Scenario 1: With url
        mocker.patch(MODULE_PATH + "get_dynamic_uri",
                     return_value={"Links": {"Oem": {"Dell": {"DellLCService": {ODATA: DIAGS_ODATA}}}},
                                   "Actions": {"#DellLCService.ExportePSADiagnosticsResult": {"target": API_ONE}}})
        idrac_default_args.update(export_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        export_diagnostics_obj._ExportDiagnostics__get_export_diagnostics_url()
        assert export_diagnostics_obj.export_url == API_ONE

        # Scenario 2: When url is empty -> unsupported firmware error
        mocker.patch(MODULE_PATH + "get_dynamic_uri",
                     return_value={"Links": {}})
        with pytest.raises(Exception) as exc:
            export_diagnostics_obj._ExportDiagnostics__get_export_diagnostics_url()
        assert exc.value.args[0] == UNSUPPORTED_FIRMWARE_MSG

        # Scenario 3: For error message from the resource-id lookup
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
                     return_value=(REDFISH, "error"))
        with pytest.raises(Exception) as exc:
            export_diagnostics_obj._ExportDiagnostics__get_export_diagnostics_url()
        assert exc.value.args[0] == "error"

    def test_export_diagnostics(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """__export_diagnostics posts the payload with or without a user-supplied file name."""
        obj = MagicMock()
        obj.status_code = 200
        payload = mocker.patch(MODULE_PATH + PAYLOAD_FUNC, return_value={})
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER, return_value=obj)
        mocker.patch(MODULE_PATH + "ExportDiagnostics._ExportDiagnostics__get_export_diagnostics_url", return_value=API_ONE)
        # Scenario 1: With file name
        export_params = {
            'share_parameters': {
                'file_name': DIAGS_FILE_NAME
            }
        }
        idrac_default_args.update(export_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        result = export_diagnostics_obj._ExportDiagnostics__export_diagnostics(payload)
        assert result == obj

        # Scenario 2: Without file name (a default name is derived from the iDRAC IP)
        export_params = {
            'idrac_ip': IP,
            'share_parameters': {
                'file_name': ''
            }
        }
        idrac_default_args.update(export_params)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        result = export_diagnostics_obj._ExportDiagnostics__export_diagnostics(payload)
        assert result == obj

    def test_get_job_status_success(self, mocker, idrac_diagnostics_mock):
        """get_job_status returns the tracked job details when tracking succeeds."""
        obj = self.get_module_mock()
        diagnostics_job_response_mock = mocker.MagicMock()
        diagnostics_job_response_mock.headers.get.return_value = "HTTPS_PATH/job_tracking/12345"
        mocker.patch(MODULE_PATH + "remove_key", return_value={"job_details": "mocked_job_details"})
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri", return_value=[MANAGER_URI_ONE])
        obj_under_test = self.module.ExportDiagnostics(idrac_diagnostics_mock, obj)
        mocker.patch(MODULE_PATH + "idrac_redfish_job_tracking", return_value=(False, "mocked_message", {"job_details": "mocked_job_details"}, 0))
        result = obj_under_test.get_job_status(diagnostics_job_response_mock)
        assert result == {"job_details": "mocked_job_details"}

    def test_get_job_status_failure(self, mocker, idrac_diagnostics_mock):
        """get_job_status exits with failed=True when job tracking reports a failure."""
        obj = self.get_module_mock()
        diagnostics_job_response_mock = mocker.MagicMock()
        diagnostics_job_response_mock.headers.get.return_value = "HTTPS_PATH/job_tracking/12345"
        mocker.patch(MODULE_PATH + "remove_key", return_value={"Message": "None"})
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri", return_value=[MANAGER_URI_ONE])
        obj_under_test = self.module.ExportDiagnostics(idrac_diagnostics_mock, obj)
        mocker.patch(MODULE_PATH + "idrac_redfish_job_tracking", return_value=(True, "None", {"Message": "None"}, 0))
        # exit_json is mocked so the failure path can be asserted instead of exiting.
        exit_json_mock = mocker.patch.object(obj, "exit_json")
        result = obj_under_test.get_job_status(diagnostics_job_response_mock)
        exit_json_mock.assert_called_once_with(msg="None", failed=True, job_details={"Message": "None"})
        assert result == {"Message": "None"}

    def test_perform_check_mode(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """perform_check_mode maps API responses/HTTP errors to check-mode outcomes."""
        obj = MagicMock()
        # Scenario 1: With status code 200 -> changes would be applied
        obj.status_code = 200
        idrac_default_args.update({'ShareType': 'Local'})
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER, return_value=obj)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        with pytest.raises(Exception) as exc:
            export_diagnostics_obj.perform_check_mode()
        assert exc.value.args[0] == CHANGES_FOUND_MSG

        # Scenario 2: With status code 400 -> nothing reported
        obj.status_code = 400
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER, return_value=obj)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        val = export_diagnostics_obj.perform_check_mode()
        assert val is None

        # Scenario 3: HTTP Error with message id SYS099 -> surfaced as the failure message
        json_str = to_text(json.dumps({"error": {MESSAGE_EXTENDED: [
            {
                'MessageId': "SYS099",
                "Message": NO_FILE
            }
        ]}}))
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
                     side_effect=HTTPError(HTTPS_PATH, 400,
                                           HTTP_ERROR,
                                           {"accept-type": APPLICATION_JSON},
                                           StringIO(json_str)))
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        with pytest.raises(Exception) as exc:
            export_diagnostics_obj.perform_check_mode()
        assert exc.value.args[0] == NO_FILE

        # Scenario 4: HTTP Error without the SYS099 message id -> swallowed, returns None
        json_str = to_text(json.dumps({"error": {MESSAGE_EXTENDED: [
            {
                'MessageId': "123",
                "Message": "error"
            }
        ]}}))
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
                     side_effect=HTTPError(HTTPS_PATH, 400,
                                           HTTP_ERROR,
                                           {"accept-type": APPLICATION_JSON},
                                           StringIO(json_str)))
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        export_diagnostics_obj = self.module.ExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        val = export_diagnostics_obj.perform_check_mode()
        assert val is None
+
+
class TestRunAndExportDiagnostics(FakeAnsibleModule):
    """Unit tests for the RunAndExportDiagnostics class of the idrac_diagnostics module."""

    module = idrac_diagnostics

    @pytest.fixture
    def idrac_diagnostics_mock(self):
        # Bare MagicMock standing in for the iDRAC connection handle.
        idrac_obj = MagicMock()
        return idrac_obj

    @pytest.fixture
    def idrac_connection_diagnostics_mock(self, mocker, idrac_diagnostics_mock):
        # Patch iDRACRedfishAPI so the code under test receives the mock above,
        # both when called directly and when entered as a context manager.
        idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
                                       return_value=idrac_diagnostics_mock)
        idrac_conn_mock.return_value.__enter__.return_value = idrac_diagnostics_mock
        return idrac_conn_mock

    def test_execute(self, idrac_default_args, idrac_connection_diagnostics_mock, mocker):
        """execute() combines run + export when waiting, otherwise returns the run-only message."""
        obj = MagicMock()
        obj.status_code = 200

        # Stub standing in for ExportDiagnostics.execute (successful export).
        def export_execute():
            msg = SUCCESS_EXPORT_MSG
            job_status = "None"
            file_path = SHARE_NAME
            return msg, job_status, file_path

        # Scenario 1: When job wait is true -> run then export, combined message
        idrac_default_args.update({'job_wait': True})
        mocker.patch(MODULE_PATH + "RunDiagnostics", return_value=obj)
        obj.execute = export_execute
        mocker.patch(MODULE_PATH + "ExportDiagnostics", return_value=obj)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_and_export_obj = self.module.RunAndExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        msg, job_status, file_path = run_and_export_obj.execute()
        assert msg == SUCCESS_RUN_AND_EXPORT_MSG

        # Scenario 2: When job wait is false -> only the run message is propagated
        # Stub standing in for RunDiagnostics.execute (job still running).
        def run_execute():
            msg = RUNNING_RUN_MSG
            job_status = "None"
            file_path = "None"
            return msg, job_status, file_path

        idrac_default_args.update({'job_wait': False})
        obj.execute = run_execute
        mocker.patch(MODULE_PATH + "RunDiagnostics", return_value=obj)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        run_obj = self.module.RunAndExportDiagnostics(idrac_connection_diagnostics_mock, f_module)
        msg, job_status, file_path = run_obj.execute()
        assert msg == RUNNING_RUN_MSG
+
+
+class TestDiagnosticsType(FakeAnsibleModule):
+ module = idrac_diagnostics
+
+ @pytest.fixture
+ def idrac_diagnostics_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_diagnostics_mock(self, mocker, idrac_diagnostics_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
+ return_value=idrac_diagnostics_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_diagnostics_mock
+ return idrac_conn_mock
+
+ def test_diagnostics_operation(self, idrac_default_args, idrac_connection_diagnostics_mock):
+ idrac_default_args.update({"run": True, "export": False})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ diags_class = self.module.DiagnosticsType.diagnostics_operation(idrac_connection_diagnostics_mock, f_module)
+ assert isinstance(diags_class, self.module.RunDiagnostics)
+
+ idrac_default_args.update({"run": False, "export": True})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ diags_class = self.module.DiagnosticsType.diagnostics_operation(idrac_connection_diagnostics_mock, f_module)
+ assert isinstance(diags_class, self.module.ExportDiagnostics)
+
+ idrac_default_args.update({"run": True, "export": True})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ diags_class = self.module.DiagnosticsType.diagnostics_operation(idrac_connection_diagnostics_mock, f_module)
+ assert isinstance(diags_class, self.module.RunAndExportDiagnostics)
+
+ idrac_default_args.update({"run": False, "export": False})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ with pytest.raises(Exception) as exc:
+ self.module.DiagnosticsType.diagnostics_operation(idrac_connection_diagnostics_mock, f_module)
+ assert exc.value.args[0] == NO_OPERATION_SKIP_MSG
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_idrac_diagnostics_main_exception_handling_case(self, exc_type, mocker, idrac_default_args):
+ idrac_default_args.update({"run": True})
+ # Scenario 1: HTTPError with message id SYS099
+ json_str = to_text(json.dumps({"error": {MESSAGE_EXTENDED: [
+ {
+ 'MessageId': "SYS099",
+ "Message": "Error"
+ }
+ ]}}))
+ if exc_type in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + RUN_EXEC_FUNC,
+ side_effect=exc_type(HTTPS_PATH, 400,
+ HTTP_ERROR,
+ {"accept-type": APPLICATION_JSON},
+ StringIO(json_str)))
+ else:
+ mocker.patch(MODULE_PATH + RUN_EXEC_FUNC,
+ side_effect=exc_type('test'))
+ result = self._run_module(idrac_default_args)
+ if exc_type == URLError:
+ assert result['unreachable'] is True
+ assert 'msg' in result
+
+ # Scenario 2: HTTPError with message id SYS098
+ json_str = to_text(json.dumps({"error": {MESSAGE_EXTENDED: [
+ {
+ 'MessageId': "SYS098",
+ "Message": "Error"
+ }
+ ]}}))
+ if exc_type in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + RUN_EXEC_FUNC,
+ side_effect=exc_type(HTTPS_PATH, 400,
+ HTTP_ERROR,
+ {"accept-type": APPLICATION_JSON},
+ StringIO(json_str)))
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
+
+ # Scenario 3: HTTPError with random message id
+ json_str = to_text(json.dumps({"error": {MESSAGE_EXTENDED: [
+ {
+ 'MessageId': "123",
+ "Message": "Error"
+ }
+ ]}}))
+ if exc_type in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + RUN_EXEC_FUNC,
+ side_effect=exc_type(HTTPS_PATH, 400,
+ HTTP_ERROR,
+ {"accept-type": APPLICATION_JSON},
+ StringIO(json_str)))
+ result = self._run_module(idrac_default_args)
+ assert 'msg' in result
+
+ def test_main(self, mocker):
+ module_mock = mocker.MagicMock()
+ idrac_mock = mocker.MagicMock()
+ diagnostics_mock = mocker.MagicMock()
+ diagnostics_mock.execute.return_value = (None, None, None)
+
+ mocker.patch(MODULE_PATH + 'get_argument_spec', return_value={})
+ mocker.patch(MODULE_PATH + 'idrac_auth_params', {})
+ mocker.patch(MODULE_PATH + 'AnsibleModule', return_value=module_mock)
+ mocker.patch(MODULE_PATH + 'iDRACRedfishAPI', return_value=idrac_mock)
+ mocker.patch(MODULE_PATH + 'DiagnosticsType.diagnostics_operation', return_value=diagnostics_mock)
+ main()
+ diagnostics_mock.execute.return_value = (None, None, SHARE_NAME)
+ mocker.patch(MODULE_PATH + 'DiagnosticsType.diagnostics_operation', return_value=diagnostics_mock)
+ main()
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py
index a6fbb1d04..d8c23160e 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py
@@ -2,94 +2,615 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.2.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
__metaclass__ = type
-import pytest
import json
-from ansible_collections.dellemc.openmanage.plugins.modules import idrac_reset
-from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
-from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
-from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from mock import MagicMock, Mock
+import pytest
from io import StringIO
from ansible.module_utils._text import to_text
+from urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_reset
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.idrac_reset.'
+MODULE_UTILS_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.utils.'
+
+MANAGERS_URI = "/redfish/v1/Managers"
+OEM = "Oem"
+MANUFACTURER = "Dell"
+ACTIONS = "Actions"
+IDRAC_RESET_RETRIES = 50
+LC_STATUS_CHECK_SLEEP = 30
+IDRAC_URI = "/redfish/v1/Managers/iDRAC.Embedded.1"
+IDRAC_JOB_URI = "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/{job_id}"
+RESET_TO_DEFAULT_ERROR = "{reset_to_default} is not supported. The supported values are {supported_values}. Enter the valid values and retry the operation."
+RESET_TO_DEFAULT_ERROR_MSG = "{reset_to_default} is not supported."
+CUSTOM_ERROR = "{reset_to_default} is not supported on this firmware version of iDRAC. The supported values are {supported_values}. \
+Enter the valid values and retry the operation."
+IDRAC_RESET_RESTART_SUCCESS_MSG = "iDRAC restart operation completed successfully."
+IDRAC_RESET_SUCCESS_MSG = "Successfully performed iDRAC reset."
+IDRAC_RESET_RESET_TRIGGER_MSG = "iDRAC reset operation triggered successfully."
+IDRAC_RESET_RESTART_TRIGGER_MSG = "iDRAC restart operation triggered successfully."
+INVALID_DIRECTORY_MSG = "Provided directory path '{path}' is invalid."
+FAILED_RESET_MSG = "Failed to perform the reset operation."
+RESET_UNTRACK = "iDRAC reset is in progress. Changes will apply once the iDRAC reset operation is successfully completed."
+TIMEOUT_NEGATIVE_OR_ZERO_MSG = "The value of `job_wait_timeout` parameter cannot be negative or zero. Enter the valid value and retry the operation."
+INVALID_FILE_MSG = "File extension is invalid. Supported extension for 'custom_default_file' is: .xml."
+LC_STATUS_MSG = "Lifecycle controller status check is {lc_status} after {retries} number of retries, Exiting.."
+INSUFFICIENT_DIRECTORY_PERMISSION_MSG = "Provided directory path '{path}' is not writable. Please check if the directory has appropriate permissions."
+UNSUPPORTED_LC_STATUS_MSG = "Lifecycle controller status check is not supported."
+CHANGES_NOT_FOUND = "No changes found to commit!"
+CHANGES_FOUND = "Changes found to commit!"
+MINIMUM_SUPPORTED_FIRMWARE_VERSION = "7.00.00"
+SUCCESS_STATUS = "Success"
+FAILED_STATUS = "Failed"
+STATUS_SUCCESS = [200, 202, 204]
+ERR_STATUS_CODE = [400, 404]
+RESET_KEY = "Oem.#DellManager.ResetToDefaults"
+RESTART_KEY = "#Manager.Reset"
+GET_BASE_URI_KEY = "Validation.get_base_uri"
+INVOKE_REQ_KEY = "iDRACRedfishAPI.invoke_request"
+GET_CUSTOM_DEFAULT_KEY = "CustomDefaultsDownloadURI"
+SET_CUSTOM_DEFAULT_KEY = "#DellManager.SetCustomDefaults"
+CHECK_LC_STATUS = "FactoryReset.check_lcstatus"
+RESET_ALLOWABLE_KEY = "ResetType@Redfish.AllowableValues"
+VALIDATE_RESET_OPTION_KEY = "Validation.validate_reset_options"
+FILE_PATH = "/root/custom_default_content.xml"
+CHECK_IDRAC_VERSION = "FactoryReset.is_check_idrac_latest"
+EXECUTE_KEY = "FactoryReset.execute"
+HTTP_ERROR_MSG = "http error message"
+RETURN_TYPE = "application/json"
+FILE_PATH = "abc/test"  # NOTE(review): shadows the FILE_PATH defined above; consider removing the duplicate
+
+
+class TestValidation(FakeAnsibleModule):
+ module = idrac_reset
+ allowed_values = ["All", "Default", "CustomDefaults", "ResetAllWithRootDefaults"]
+ allowed_values_api = {
+ 'Actions':
+ {
+ "#Manager.Reset": {
+ "ResetType@Redfish.AllowableValues": [
+ "Test"
+ ]
+ },
+ "Oem": {
+ "#DellManager.ResetToDefaults": {
+ RESET_ALLOWABLE_KEY: [
+ "All",
+ "Default",
+ "ResetAllWithRootDefaults"
+ ]
+ }
+ }
+ },
+ "Oem": {
+ "Dell": {
+ "CustomDefaultsDownloadURI": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/CustomDefaultsDownloadURI"
+ }
+ }
+ }
+
+ @pytest.fixture
+ def idrac_reset_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_reset_mock(self, mocker, idrac_reset_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
+ return_value=idrac_reset_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_reset_mock
+ return idrac_conn_mock
-from pytest import importorskip
+ def test_get_base_uri(self, idrac_default_args, idrac_connection_reset_mock, mocker):
+ # Scenario - when validate_and_get_first_resource_id_uri return proper uri
+ mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
+ return_value=(IDRAC_URI, ''))
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.Validation(
+ idrac_connection_reset_mock, f_module)
+ data = idr_obj.get_base_uri()
+ assert data == IDRAC_URI
-importorskip("omsdk.sdkfile")
-importorskip("omsdk.sdkcreds")
+ def test_validate_reset_options(self, idrac_default_args, idrac_connection_reset_mock, mocker):
+        # Scenario - when key 'Oem' doesn't exist in output from invoke_request
+ obj = MagicMock()
+ obj.json_data = {'Actions': {}}
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, return_value=obj)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idrac_default_args.update({"reset_to_default": 'All'})
+ idr_obj = self.module.Validation(
+ idrac_connection_reset_mock, f_module)
+ allowed_values, res = idr_obj.validate_reset_options(RESET_KEY)
+ assert res is False
+
+ # Scenario - when reset_to_default is not in allowable values
+ obj.json_data = self.allowed_values_api
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, return_value=obj)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idrac_default_args.update({"reset_to_default": 'CustomDefaults'})
+ idr_obj = self.module.Validation(
+ idrac_connection_reset_mock, f_module)
+ allowed_values, res = idr_obj.validate_reset_options(RESET_KEY)
+ assert res is False
+
+ def test_validate_graceful_restart_option(self, idrac_default_args, idrac_connection_reset_mock, mocker):
+ # Scenario - when key doesn't exist in output from invoke_request
+ obj = MagicMock()
+ obj.json_data = {'Actions': {}}
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, return_value=obj)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.Validation(
+ idrac_connection_reset_mock, f_module)
+ res = idr_obj.validate_graceful_restart_option(RESTART_KEY)
+ assert res is False
-MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+        # Scenario - when 'GracefulRestart' is not in allowable values
+ obj.json_data = self.allowed_values_api
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, return_value=obj)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.Validation(
+ idrac_connection_reset_mock, f_module)
+ res = idr_obj.validate_graceful_restart_option(RESTART_KEY)
+ assert res is False
+ def test_validate_path(self, idrac_default_args, idrac_connection_reset_mock, mocker):
+ # Scenario - when custom default file path doesn't exist
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + 'os.path.exists', return_value=False)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.Validation(
+ idrac_connection_reset_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.validate_path(FILE_PATH)
+ assert exc.value.args[0] == INVALID_DIRECTORY_MSG.format(path=FILE_PATH)
-@pytest.fixture
-def idrac_reset_connection_mock(mocker, idrac_mock):
- idrac_connection_class_mock = mocker.patch(MODULE_PATH + 'idrac_reset.iDRACConnection')
- idrac_connection_class_mock.return_value.__enter__.return_value = idrac_mock
- return idrac_mock
+ # Scenario - when custom default file path exist but not accessible
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + 'os.path.exists', return_value=True)
+ mocker.patch(MODULE_PATH + 'os.access', return_value=False)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.Validation(
+ idrac_connection_reset_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.validate_path(FILE_PATH)
+ assert exc.value.args[0] == INSUFFICIENT_DIRECTORY_PERMISSION_MSG.format(path=FILE_PATH)
+ def test_validate_file_format(self, idrac_default_args, idrac_connection_reset_mock, mocker):
+ # Scenario - when custom default file is not in XML format
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.Validation(
+ idrac_connection_reset_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.validate_file_format('abc/test.json')
+ assert exc.value.args[0] == INVALID_FILE_MSG
-class TestReset(FakeAnsibleModule):
+ def test_validate_custom_option_exception_case(self, idrac_default_args, idrac_connection_reset_mock, mocker):
+ obj = MagicMock()
+ obj.json_data = self.allowed_values_api
+ json_str = to_text(json.dumps({"data": "out"}))
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + 'get_dynamic_uri', return_value=obj)
+ mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, side_effect=HTTPError("https://test.com", 404, HTTP_ERROR_MSG,
+ {"accept-type": RETURN_TYPE},
+ StringIO(json_str)))
+ idrac_default_args.update({"reset_to_default": 'CustomDefaults'})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.Validation(
+ idrac_connection_reset_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.validate_custom_option('CustomDefaults', self.allowed_values)
+ assert exc.value.args[0] == RESET_TO_DEFAULT_ERROR.format(reset_to_default='CustomDefaults', supported_values=self.allowed_values)
+
+ def test_validate_job_wait_negative_values(self, idrac_default_args, idrac_connection_reset_mock, mocker):
+ # Scenario - when job_wait_timeout is negative
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY,
+ return_value=IDRAC_URI)
+ idrac_default_args.update({"wait_for_idrac": True, "job_wait_timeout": -120})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.Validation(idrac_connection_reset_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.validate_job_timeout()
+ assert exc.value.args[0] == TIMEOUT_NEGATIVE_OR_ZERO_MSG
+
+ # Scenario - when job_wait_timeout is positive
+ idrac_default_args.update({"job_wait": True, "job_wait_timeout": 120})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.Validation(idrac_connection_reset_mock, f_module)
+ idr_obj.validate_job_timeout()
+
+
+class TestFactoryReset(FakeAnsibleModule):
module = idrac_reset
+ lc_status_api_links = {
+ "Oem": {
+ "Dell": {
+ "DellLCService": {
+ "@odata.id": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellLCService"
+ }
+ }
+ }
+ }
+
+ action_api_resp = {
+ "Actions": {
+ "#DellLCService.GetRemoteServicesAPIStatus": {
+ "target": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellLCService/Actions/DellLCService.GetRemoteServicesAPIStatus"
+ }
+ }
+ }
+
+ action_api_resp_restart = {
+ RESTART_KEY: {
+ RESET_ALLOWABLE_KEY: [
+ "GracefulRestart"
+ ],
+ "target": "/redfish/v1/Managers/iDRAC.Embedded.1/Actions/Manager.Reset"
+ }
+ }
+
+ lc_status_invoke = {
+ "LCStatus": "Ready"
+ }
+ lc_status_invoke_not_ready = {
+ "LCStatus": "Not Initialized"
+ }
+
+ validate_allowed_values = {
+ "Actions": {
+ RESTART_KEY: {
+ RESET_ALLOWABLE_KEY: [
+ "GracefulRestart"
+ ],
+ "target": "/redfish/v1/Managers/iDRAC.Embedded.1/Actions/Manager.Reset"
+ },
+ "#Manager.ResetToDefaults": {
+ RESET_ALLOWABLE_KEY: [
+ "ResetAll",
+ "PreserveNetworkAndUsers"
+ ],
+ "target": "/redfish/v1/Managers/iDRAC.Embedded.1/Actions/Manager.ResetToDefaults"
+ },
+ "Oem": {
+ "#DellManager.ResetToDefaults": {
+ RESET_ALLOWABLE_KEY: [
+ "All",
+ "CustomDefaults",
+ "Default",
+ "ResetAllWithRootDefaults"
+ ],
+ "target": "/redfish/v1/Managers/iDRAC.Embedded.1/Actions/Oem/DellManager.ResetToDefaults"
+ },
+ "#DellManager.SetCustomDefaults": {
+ "target": "/redfish/v1/Managers/iDRAC.Embedded.1/Actions/Oem/DellManager.SetCustomDefaults"
+ },
+ }
+ },
+ "Oem": {
+ "Dell": {
+ "CustomDefaultsDownloadURI": "/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/CustomDefaultsDownloadURI"
+ }
+ }
+ }
+
+ custom_default_content = "<SystemConfiguration Model=\"PowerEdge R7525\" ServiceTag=\"2V4TK93\">\n<Component FQDD=\"iDRAC.Embedded.1\">\n \
+ <Attribute Name=\"IPMILan.1#Enable\">Disabled</Attribute>\n </Component>\n\n</SystemConfiguration>"
@pytest.fixture
- def idrac_mock(self, mocker):
- omsdk_mock = MagicMock()
+ def idrac_reset_mock(self):
idrac_obj = MagicMock()
- omsdk_mock.config_mgr = idrac_obj
- type(idrac_obj).reset_idrac = Mock(return_value="idracreset")
return idrac_obj
@pytest.fixture
- def idrac_config_mngr_reset_mock(self, mocker):
- try:
- config_manager_obj = mocker.patch(MODULE_PATH + 'idrac_reset.config_mgr')
- except AttributeError:
- config_manager_obj = MagicMock()
+ def idrac_connection_reset_mock(self, mocker, idrac_reset_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
+ return_value=idrac_reset_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_reset_mock
+ return idrac_conn_mock
+
+ def test_is_check_idrac_latest(self, idrac_default_args, idrac_connection_reset_mock, mocker):
+ allowed_values = ["All", "Default", "ResetAllWithRootDefaults"]
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="7.10.05")
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ reset_obj = self.module.FactoryReset(idrac_connection_reset_mock, f_module, allowed_choices=allowed_values)
+ res = reset_obj.is_check_idrac_latest()
+ assert res is True
+
+ def test_check_mode_output(self, idrac_default_args, idrac_connection_reset_mock, mocker):
+ # Scenario - When Reset to default is not passed and check mode is true
+ allowed_values = ["All", "Default", "ResetAllWithRootDefaults"]
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="7.10.05")
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + "Validation.validate_graceful_restart_option", return_value=False)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ reset_obj = self.module.FactoryReset(idrac_connection_reset_mock, f_module, allowed_choices=allowed_values)
+ with pytest.raises(Exception) as exc:
+ reset_obj.check_mode_output(True)
+ assert exc.value.args[0] == CHANGES_NOT_FOUND
+
+ def test_execute(self, idrac_default_args, idrac_connection_reset_mock, mocker):
+ allowed_values = ["All", "Default", "ResetAllWithRootDefaults", "CustomDefaults"]
+ allowed_values_without_cd = ["All", "Default", "ResetAllWithRootDefaults"]
+ # Scenario - When 'GracefulRestart' is not supported and iDRAC8 and check_mode True
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="2.81.81")
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + "Validation.validate_graceful_restart_option", return_value=False)
+ mocker.patch(MODULE_PATH + CHECK_IDRAC_VERSION, return_value=False)
+ idrac_default_args.update({})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ reset_obj = self.module.FactoryReset(idrac_connection_reset_mock, f_module, allowed_choices=allowed_values)
+ with pytest.raises(Exception) as exc:
+ reset_obj.execute()
+ assert exc.value.args[0] == CHANGES_NOT_FOUND
+
+ # Scenario: when success message is returned for graceful restart for IDRAC8 or IDRAC9
obj = MagicMock()
- config_manager_obj.config_mgr.return_value = obj
- config_manager_obj.config_mgr.reset_idrac().return_value = obj
- return config_manager_obj
-
- def test_main_idrac_reset_success_case01(self, idrac_reset_connection_mock, idrac_default_args, mocker):
- mocker.patch(MODULE_PATH + "idrac_reset.run_idrac_reset",
- return_value=({"Status": "Success"}, False))
- idrac_reset_connection_mock.config_mgr.reset_idrac.return_value = {"Status": "Success"}
- idrac_reset_connection_mock.config_mgr.reset_idrac.return_value = "Success"
- result = self._run_module(idrac_default_args)
- assert result == {'msg': 'Successfully performed iDRAC reset.',
- 'reset_status': ({'Status': 'Success'}, False), 'changed': False}
+ obj.status_code = 204
+ obj.json_data = self.lc_status_invoke
+
+ def mock_get_dynamic_uri_request(*args, **kwargs):
+ if len(args) > 2 and args[2] == 'Links':
+ return self.lc_status_api_links
+ elif len(args) > 2 and args[2] == 'Actions':
+ return self.action_api_resp_restart
+ return self.action_api_resp
+ mocker.patch(MODULE_PATH + CHECK_IDRAC_VERSION, return_value=True)
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, return_value=obj)
+ mocker.patch(MODULE_PATH + "get_dynamic_uri",
+ side_effect=mock_get_dynamic_uri_request)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ reset_obj = self.module.FactoryReset(idrac_connection_reset_mock, f_module, allowed_choices=allowed_values)
+ msg_resp, resp = reset_obj.execute()
+ assert msg_resp['msg'] == IDRAC_RESET_SUCCESS_MSG
+
+ # Scenario: when success message reset_to_default is passed as 'Default' for idrac9 with job_wait set to True
+ obj.status_code = 200
+ obj2 = MagicMock()
+ obj3 = MagicMock()
+ obj2.json_data = self.validate_allowed_values
+ obj3.json_data = self.lc_status_invoke_not_ready
+
+ def mock_get_dynamic_uri_request(*args, **kwargs):
+ if len(args) > 2 and args[2] == 'Links':
+ return self.lc_status_api_links
+ elif len(args) > 2 and args[2] == 'Actions':
+ return self.validate_allowed_values
+ return self.action_api_resp
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="7.10.05")
+ mocker.patch(MODULE_PATH + CHECK_IDRAC_VERSION, return_value=True)
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, side_effect=[obj, obj2, obj, URLError('URL error occurred'), obj, URLError('URL error occurred'), obj3, obj])
+ mocker.patch(MODULE_PATH + "get_dynamic_uri",
+ side_effect=mock_get_dynamic_uri_request)
+ idrac_default_args.update({"reset_to_default": "Default"})
+ idrac_default_args.update({"wait_for_idrac": True})
+ idrac_default_args.update({"job_wait_timeout": 300})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ reset_obj = self.module.FactoryReset(idrac_connection_reset_mock, f_module, allowed_choices=allowed_values)
+ msg_resp, resp = reset_obj.execute()
+ assert msg_resp['msg'] == IDRAC_RESET_SUCCESS_MSG
+
+ # Scenario: when success message reset_to_default is passed as 'CustomDefaults' with custom_default_buffer
+ obj4 = MagicMock()
+ obj4.json_data = {'LCStatus': 'NOTINITIALIZED'}
+ obj2.headers = {'Location': "/joburl/JID12345"}
+ obj2.status_code = 200
+        # NOTE(review): "CustomDefaults" is already present in allowed_values above; no append needed
+ job_resp_completed = {'JobStatus': 'Completed'}
+ idrac_redfish_resp = (False, 'Job Success', job_resp_completed, 1200)
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="7.10.05")
+ mocker.patch(MODULE_PATH + CHECK_IDRAC_VERSION, return_value=True)
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + VALIDATE_RESET_OPTION_KEY, side_effect=[(allowed_values, True), (allowed_values, True)])
+ mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, side_effect=[obj, obj2, obj, obj2])
+ mocker.patch(MODULE_PATH + 'idrac_redfish_job_tracking', return_value=idrac_redfish_resp)
+ mocker.patch(MODULE_PATH + "get_dynamic_uri",
+ side_effect=[self.lc_status_api_links, self.action_api_resp_restart,
+ self.validate_allowed_values, self.validate_allowed_values,
+ self.validate_allowed_values, self.lc_status_api_links, self.action_api_resp_restart])
+ idrac_default_args.update({"reset_to_default": "CustomDefaults", "custom_defaults_buffer": self.custom_default_content})
+ idrac_default_args.update({"wait_for_idrac": False})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ reset_obj = self.module.FactoryReset(idrac_connection_reset_mock, f_module, allowed_choices=allowed_values)
+ msg_resp, resp = reset_obj.execute()
+ assert msg_resp['msg'] == IDRAC_RESET_RESET_TRIGGER_MSG
- def test_run_idrac_reset_success_case01(self, idrac_reset_connection_mock, idrac_default_args):
- f_module = self.get_module_mock(params=idrac_default_args)
- result = self.module.run_idrac_reset(idrac_reset_connection_mock, f_module)
- assert result == idrac_reset_connection_mock.config_mgr.reset_idrac()
+ # Scenario - When reset_to_default is passed and iDRAC8 and check_mode True
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="2.81.81")
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + CHECK_IDRAC_VERSION, return_value=False)
+ mocker.patch(MODULE_PATH + VALIDATE_RESET_OPTION_KEY, return_value=(None, False))
+ idrac_default_args.update({"reset_to_default": "CustomDefaults"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ reset_obj = self.module.FactoryReset(idrac_connection_reset_mock, f_module, allowed_choices=allowed_values)
+ with pytest.raises(Exception) as exc:
+ reset_obj.execute()
+ assert exc.value.args[0] == CHANGES_NOT_FOUND
+
+ # Scenario - When reset_to_default is passed and iDRAC8 and check_mode False
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="2.81.81")
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + VALIDATE_RESET_OPTION_KEY, return_value=(None, False))
+ mocker.patch(MODULE_PATH + CHECK_IDRAC_VERSION, return_value=False)
+ idrac_default_args.update({"reset_to_default": "CustomDefaults"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ reset_obj = self.module.FactoryReset(idrac_connection_reset_mock, f_module, allowed_choices=allowed_values)
+ with pytest.raises(Exception) as exc:
+ reset_obj.execute()
+ assert exc.value.args[0] == RESET_TO_DEFAULT_ERROR_MSG.format(reset_to_default='CustomDefaults')
+
+ # Scenario - When reset_to_default is CustomDefaults and iDRAC9 firmware version not supported
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="6.99.99")
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + VALIDATE_RESET_OPTION_KEY, return_value=(allowed_values_without_cd, True))
+ mocker.patch(MODULE_PATH + CHECK_LC_STATUS, return_value=None)
+ mocker.patch(MODULE_PATH + CHECK_IDRAC_VERSION, return_value=False)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ reset_obj = self.module.FactoryReset(idrac_connection_reset_mock, f_module, allowed_choices=allowed_values)
+ with pytest.raises(Exception) as exc:
+ reset_obj.execute()
+ assert exc.value.args[0] == CUSTOM_ERROR.format(reset_to_default="CustomDefaults",
+ supported_values=allowed_values_without_cd)
+
+ # Scenario - When reset_to_default is passed and iDRAC9 and check_mode True
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="7.10.60")
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + VALIDATE_RESET_OPTION_KEY, return_value=(allowed_values, False))
+ mocker.patch(MODULE_PATH + CHECK_IDRAC_VERSION, return_value=True)
+ idrac_default_args.update({"reset_to_default": "CustomDefaults", "custom_defaults_buffer": self.custom_default_content})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ reset_obj = self.module.FactoryReset(idrac_connection_reset_mock, f_module, allowed_choices=allowed_values)
+ with pytest.raises(Exception) as exc:
+ reset_obj.execute()
+ assert exc.value.args[0] == CHANGES_FOUND
- def test_run_idrac_reset_status_success_case02(self, idrac_reset_connection_mock, idrac_default_args):
+ # Scenario - When reset_to_default is passed and iDRAC9 with firmware version not supported and check_mode True
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="6.81.81")
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + CHECK_IDRAC_VERSION, return_value=True)
+ idrac_default_args.update({"reset_to_default": "CustomDefaults", "custom_defaults_buffer": self.custom_default_content})
f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
- result = self.module.run_idrac_reset(idrac_reset_connection_mock, f_module)
- assert result == {'Message': 'Changes found to commit!', 'Status': 'Success', 'changes_applicable': True}
+ reset_obj = self.module.FactoryReset(idrac_connection_reset_mock, f_module, allowed_choices=allowed_values)
+ with pytest.raises(Exception) as exc:
+ reset_obj.execute()
+ assert exc.value.args[0] == CHANGES_NOT_FOUND
- @pytest.mark.parametrize("exc_type", [SSLValidationError, URLError, ValueError, TypeError,
- ConnectionError, HTTPError])
- def test_main_exception_handling_case(self, exc_type, mocker, idrac_reset_connection_mock, idrac_default_args):
+ # Scenario - When reset_to_default is 'CustomDefaults' and iDRAC9 and custom_defaults_file is passed
json_str = to_text(json.dumps({"data": "out"}))
- if exc_type not in [HTTPError, SSLValidationError]:
- mocker.patch(MODULE_PATH + 'idrac_reset.run_idrac_reset', side_effect=exc_type('test'))
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="7.10.05")
+ mocker.patch(MODULE_PATH + CHECK_IDRAC_VERSION, return_value=True)
+ mocker.patch(MODULE_PATH + CHECK_LC_STATUS, return_value=None)
+ mocker.patch(MODULE_PATH + "Validation.validate_path", return_value=None)
+ mocker.patch(MODULE_PATH + "Validation.validate_file_format", return_value=None)
+ mocker.patch(MODULE_PATH + "Validation.validate_custom_option", return_value=None)
+ mocker.patch(MODULE_PATH + 'open', mocker.mock_open(read_data=self.custom_default_content))
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + VALIDATE_RESET_OPTION_KEY, side_effect=[(allowed_values, True), (allowed_values, True)])
+ mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, side_effect=[obj2, obj, HTTPError("https://test.com",
+ 401, HTTP_ERROR_MSG, {"accept-type": RETURN_TYPE},
+ StringIO(json_str))])
+ mocker.patch(MODULE_PATH + 'idrac_redfish_job_tracking', return_value=idrac_redfish_resp)
+ mocker.patch(MODULE_PATH + "get_dynamic_uri",
+ side_effect=[self.validate_allowed_values, self.validate_allowed_values,
+ self.validate_allowed_values, self.lc_status_api_links, self.action_api_resp_restart])
+ idrac_default_args.update({"reset_to_default": "CustomDefaults", "custom_defaults_file": FILE_PATH})
+ idrac_default_args.update({"wait_for_idrac": True})
+ idrac_default_args.update({"job_wait_timeout": 300})
+ idrac_default_args.update({"force_reset": True})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ reset_obj = self.module.FactoryReset(idrac_connection_reset_mock, f_module, allowed_choices=allowed_values)
+ msg_resp, resp = reset_obj.execute()
+ assert msg_resp['msg'] == IDRAC_RESET_SUCCESS_MSG
+
+ # Scenario: Failure - when reset_to_default is passed as 'ResetAllWithRootDefaults' for idrac9 with job_wait set to True
+ def mock_get_dynamic_uri_request(*args, **kwargs):
+ if len(args) > 2 and args[2] == 'Links':
+ return self.lc_status_api_links
+ elif len(args) > 2 and args[2] == 'Actions':
+ return self.validate_allowed_values
+ return self.action_api_resp
+ obj.status_code = 400
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="7.10.05")
+ mocker.patch(MODULE_PATH + CHECK_LC_STATUS, return_value=None)
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + VALIDATE_RESET_OPTION_KEY, return_value=(allowed_values, True))
+ mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, side_effect=[obj])
+ mocker.patch(MODULE_PATH + "get_dynamic_uri",
+ side_effect=mock_get_dynamic_uri_request)
+ idrac_default_args.update({"reset_to_default": "ResetAllWithRootDefaults"})
+ idrac_default_args.update({"wait_for_idrac": False})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ reset_obj = self.module.FactoryReset(idrac_connection_reset_mock, f_module, allowed_choices=allowed_values)
+ msg_resp, resp = reset_obj.execute()
+ assert msg_resp['msg'] == FAILED_RESET_MSG
+
+ def test_idrac_reset_main_positive_case(self, idrac_default_args,
+ idrac_connection_reset_mock, mocker):
+ # Scenario - When reset_to_default is passed and successful
+ msg_resp = {'msg': "Success", 'changed': True}
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="7.10.05")
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + EXECUTE_KEY, return_value=(msg_resp, {}))
+ data = self._run_module(idrac_default_args)
+ assert data['msg'] == "Success"
+
+ # Scenario - When reset_to_default is passed and Failed
+ msg_resp = {'msg': "Failure", 'changed': False}
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="7.10.05")
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + EXECUTE_KEY, return_value=(msg_resp, {}))
+ data = self._run_module(idrac_default_args)
+ assert data['msg'] == "Failure" and data['failed'] is True
+
+ # Scenario - When reset_to_default is None and successful
+ msg_resp = {'msg': "Success", 'changed': True}
+ output = {
+ "reset_status": {
+ "idracreset": {
+ "Data": {
+ "StatusCode": 204
+ },
+ "Message": "Success",
+ "Status": "Success",
+ "StatusCode": 204,
+ "retval": True
+ }
+ }
+ }
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="7.10.05")
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + EXECUTE_KEY, return_value=(msg_resp, output))
+ data = self._run_module(idrac_default_args)
+ assert data['msg'] == "Success" and data['reset_status'] == output
+
+ # Scenario - When reset_to_default is None and Failed
+ output['reset_status']['idracreset']['Message'] = "Failure"
+ output['reset_status']['idracreset']['Status'] = "Failure"
+ output['reset_status']['idracreset']['StatusCode'] = 404
+ msg_resp = {'msg': "Failure", 'changed': False}
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="7.10.05")
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + EXECUTE_KEY, return_value=(msg_resp, output))
+ data = self._run_module(idrac_default_args)
+ assert data['msg'] == "Failure" and data['reset_status'] == output
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_idrac_reset_main_exception_handling_case(self, exc_type, idrac_default_args,
+ idrac_connection_reset_mock, mocker):
+ json_str = to_text(json.dumps({"data": "out"}))
+ mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ if exc_type in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + EXECUTE_KEY,
+ side_effect=exc_type('https://testhost.com', 400,
+ HTTP_ERROR_MSG,
+ {"accept-type": RETURN_TYPE},
+ StringIO(json_str)))
else:
- mocker.patch(MODULE_PATH + 'idrac_reset.run_idrac_reset',
- side_effect=exc_type('https://testhost.com', 400, 'http error message',
- {"accept-type": "application/json"}, StringIO(json_str)))
- if not exc_type == URLError:
- result = self._run_module_with_fail_json(idrac_default_args)
- assert result['failed'] is True
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version",
+ side_effect=exc_type('test'))
+ result = self._run_module(idrac_default_args)
+ if exc_type == URLError:
+ assert result['unreachable'] is True
else:
- result = self._run_module(idrac_default_args)
+ assert result['failed'] is True
assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_session.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_session.py
new file mode 100644
index 000000000..a28aab255
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_session.py
@@ -0,0 +1,590 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 9.2.0
+# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import absolute_import, division, print_function
+
+
+from io import StringIO
+import json
+
+
+from urllib.error import HTTPError, URLError
+import pytest
+from mock import MagicMock
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_session
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import AnsibleFailJSonException
+from ansible.module_utils.urls import SSLValidationError
+from ansible.module_utils._text import to_text
+
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.idrac_session.'
+MODULE_UTILS_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.utils.'
+
+REDFISH = "/redfish/v1"
+SESSIONS = "Sessions"
+ODATA = "@odata.id"
+ODATA_REGEX = "(.*?)@odata"
+
+SESSION_URL = "/redfish/v1/SessionService/Sessions"
+GET_SESSION_URL = "Session.get_session_url"
+
+CREATE_SUCCESS_MSG = "The session has been created successfully."
+DELETE_SUCCESS_MSG = "The session has been deleted successfully."
+FAILURE_MSG = "Unable to '{operation}' a session."
+CHANGES_FOUND_MSG = "Changes found to be applied."
+NO_CHANGES_FOUND_MSG = "No changes found to be applied."
+HTTPS_PATH = "https://testhost.com"
+HTTP_ERROR = "http error message"
+APPLICATION_JSON = "application/json"
+
+
+class TestSession(FakeAnsibleModule):
+ """
+ Main class for testing the idrac_session module.
+ """
+ module = idrac_session
+
+ @pytest.fixture
+ def idrac_session_mock(self):
+ """
+ Creates a mock object for the `idrac_session` fixture.
+
+ This function uses the `MagicMock` class from the `unittest.mock` module to create a mock
+ object. The mock object is then returned by the function.
+
+ Returns:
+ MagicMock: A mock object representing the `idrac_session`.
+ """
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_session_mock(self, mocker, idrac_session_mock):
+ """
+ Returns a mock object for the `SessionAPI` class from the `MODULE_PATH` module.
+ The mock object is initialized with the `idrac_session_mock` as the return value.
+ The `__enter__` method of the mock object is also mocked to return `idrac_session_mock`.
+
+ :param mocker: The pytest fixture for mocking objects.
+ :type mocker: pytest_mock.plugin.MockerFixture
+ :param idrac_session_mock: The mock object for the `idrac_session_mock`.
+ :type idrac_session_mock: Any
+ :return: The mock object for the `SessionAPI` class.
+ :rtype: MagicMock
+ """
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'SessionAPI',
+ return_value=idrac_session_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_session_mock
+ return idrac_conn_mock
+
+ def test_get_session_url(self, idrac_default_args, idrac_connection_session_mock, mocker):
+ """
+ Test the `get_session_url` method of the `Session` class.
+
+ This test function mocks the `get_dynamic_uri` function to return a dictionary
+ containing the session URL. It then creates a `f_module` object with the
+ `idrac_default_args` and `check_mode` set to `False`. It initializes a
+ `session_obj` with the `idrac_connection_session_mock` and `f_module`.
+ Finally, it calls the `get_session_url` method on the `session_obj` and
+ asserts that the returned session URL is equal to the `SESSION_URL` constant.
+
+ Args:
+ self (TestGetSessionUrl): The test case object.
+ idrac_default_args (dict): The default arguments for the IDRAC connection.
+ idrac_connection_session_mock (MagicMock): The mock object for the IDRAC
+ connection session.
+ mocker (MagicMock): The mocker object for mocking functions and modules.
+
+ Returns:
+ None
+ """
+ v1_resp = {'Links': {'Sessions': {'@odata.id': SESSION_URL}}}
+ mocker.patch(MODULE_PATH + "get_dynamic_uri",
+ return_value=v1_resp)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ session_obj = self.module.Session(
+ idrac_connection_session_mock, f_module)
+ sessions_url = session_obj.get_session_url()
+ assert sessions_url == SESSION_URL
+
+
+class TestCreateSession(FakeAnsibleModule):
+ """
+ Main class for testing the create_session module.
+ """
+ module = idrac_session
+
+ @pytest.fixture
+ def create_session_mock(self):
+ """
+ Creates a mock object for the `idrac_session` fixture.
+
+ This function is a pytest fixture that creates a mock object of type `MagicMock` and
+ assigns it to the variable `idrac_obj`. The `idrac_obj` mock object is then returned
+ by the fixture.
+
+ Returns:
+ MagicMock: A mock object representing the `idrac_session` fixture.
+ """
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_session_mock(self, mocker, create_session_mock):
+ """
+ Creates a fixture for mocking the IDRAC connection session.
+
+ This fixture uses the `mocker` fixture from the `pytest` library to patch the
+ `SessionAPI` class from the `MODULE_PATH` module. It returns a mock object of the
+ `SessionAPI` class with the `create_session_mock` object as the return value.
+ The `__enter__` method of the mock object is also patched to return the
+ `create_session_mock` object.
+
+ Parameters:
+ - `self` (TestCase): The test case instance.
+ - `mocker` (MockerFixture): The `mocker` fixture from the `pytest` library.
+ - `create_session_mock` (Mock): The mock object representing the `create_session_mock`.
+
+ Returns:
+ - `idrac_conn_mock` (MagicMock): The mock object of the `SessionAPI` class.
+ """
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'SessionAPI',
+ return_value=create_session_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = create_session_mock
+ return idrac_conn_mock
+
+ def test_session_operation(self, idrac_default_args, idrac_connection_session_mock):
+ """
+ Test the session operation of the module.
+
+ Args:
+ idrac_default_args (dict): The default arguments for the IDRAC connection.
+ idrac_connection_session_mock (MagicMock): The mock object for the IDRAC
+ connection session.
+
+ Returns:
+ None
+
+ This function tests the session operation of the module by creating a session and deleting
+ a session.
+ It updates the `idrac_default_args` dictionary with the appropriate state parameter and
+ creates a `f_module` object with the updated arguments. It then creates a
+ `session_operation_obj` object using the `CreateSession` class of the module and asserts
+ that it is an instance of `CreateSession`.
+ It repeats the same process for deleting a session by updating the `idrac_default_args`
+ dictionary with the state parameter set to "absent" and creating a `session_operation_obj`
+ object using the
+ `DeleteSession` class of the module. It asserts that it is an instance of `DeleteSession`.
+ """
+ idrac_default_args.update({"state": "present"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ session_operation_obj = self.module.CreateSession(idrac_connection_session_mock, f_module)
+ assert isinstance(session_operation_obj, self.module.CreateSession)
+
+ idrac_default_args.update({"state": "absent"})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ session_operation_obj = self.module.DeleteSession(idrac_connection_session_mock, f_module)
+ assert isinstance(session_operation_obj, self.module.DeleteSession)
+
+ def test_create_session_failure(self, idrac_connection_session_mock, mocker):
+ """
+ Test the failure scenario of creating a session.
+
+ Args:
+ idrac_connection_session_mock (MagicMock): A mock object for the
+ idrac_connection_session.
+ mocker (MockerFixture): A fixture for mocking objects.
+
+ Returns:
+ None
+
+ This test function creates a session object using the `idrac_connection_session_mock` and
+ `f_module` objects.
+ It sets the `session_obj.get_session_url` to return a session URL.
+ It sets the `f_module.check_mode` to False and `f_module.params` to a dictionary containing
+ the username and password.
+ It mocks the `idrac_connection_session_mock.invoke_request` method to return a response
+ with a status code of 201.
+ It calls the `session_obj.execute()` method to create the session.
+ It asserts that the `f_module.exit_json` method is called once with the message "Unable to
+ 'create' a session." and `failed` set to True.
+ """
+ f_module = MagicMock()
+ session_obj = idrac_session.CreateSession(
+ idrac_connection_session_mock, f_module)
+ session_obj.get_session_url = MagicMock(return_value=SESSION_URL)
+ f_module.check_mode = False
+ f_module.params = {
+ "username": "admin",
+ "password": "password"
+ }
+ response_mock = MagicMock()
+ response_mock.status_code = 201
+ mocker.patch.object(idrac_connection_session_mock.return_value, 'invoke_request',
+ return_value=response_mock)
+
+ session_obj.execute()
+ f_module.exit_json.assert_called_once_with(
+ msg="Unable to 'create' a session.",
+ failed=True
+ )
+
+ def test_create_session_check_mode(self, idrac_connection_session_mock):
+ """
+ Test the create session functionality in check mode.
+
+ Args:
+ idrac_connection_session_mock (MagicMock): A mock object for the IDRAC connection
+ session.
+
+ Returns:
+ None
+
+ This function tests the create session functionality in check mode. It creates an instance
+ of the `CreateSession` class with the provided `idrac_connection_session_mock` and a mock
+ `f_module` object.
+ It sets the required parameters for the `f_module` object and mocks the `get_session_url`
+ method of the `session_obj` to return the session URL. It also mocks the `exit_json` method
+ of the `f_module` object.
+
+ Finally, it calls the `execute` method of the `session_obj` to execute the create session
+ functionality in check mode.
+
+ Note:
+ This function assumes that the necessary imports and setup for the test are already
+ done.
+ """
+ f_module = MagicMock()
+ session_obj = idrac_session.CreateSession(
+ idrac_connection_session_mock, f_module)
+ f_module = self.get_module_mock(
+ params={"session_id": "1234", "hostname": "X.X.X.X"}, check_mode=True)
+ session_obj.get_session_url = MagicMock(return_value=SESSION_URL)
+ f_module.exit_json = MagicMock()
+
+ session_obj.execute()
+
+ def test_create_session_success(self, idrac_connection_session_mock):
+ """
+ Test the successful creation of a session.
+
+ Args:
+ idrac_connection_session_mock (MagicMock): A mock object representing the IDRAC
+ connection session.
+
+ This test case verifies the successful creation of a session by mocking the necessary
+ objects and invoking the `execute()` method of the `CreateSession` class. It sets the
+ parameters for the `f_module` object, initializes the `session_obj` with the mocked
+ `idrac_connection_session_mock` and `f_module`, and mocks the necessary methods and
+ attributes of the `idrac` object. It then asserts that the `exit_json` method of the
+ `f_module` object is called with the expected arguments.
+
+ Returns:
+ None
+ """
+ f_module = self.get_module_mock(
+ params={"username": "admin", "password": "password"}, check_mode=False)
+ session_obj = idrac_session.CreateSession(idrac_connection_session_mock, f_module)
+ session_obj.get_session_url = MagicMock(return_value=SESSION_URL)
+ session_obj.idrac.invoke_request.return_value.status_code = 201
+ session_obj.idrac.invoke_request.return_value.json_data = {"SessionID": "123456"}
+ session_obj.idrac.invoke_request.return_value.headers.get.return_value = "token123"
+ f_module.exit_json = MagicMock()
+
+ session_obj.execute()
+ f_module.exit_json.assert_called_once_with(
+ msg=CREATE_SUCCESS_MSG,
+ changed=True,
+ session_data={"SessionID": "123456"},
+ x_auth_token="token123"
+ )
+
+
+class TestDeleteSession(FakeAnsibleModule):
+ """
+ Main class for testing the delete session module.
+ """
+ module = idrac_session
+
+ @pytest.fixture
+ def idrac_session_mock(self):
+ """
+ Creates a mock object for the `idrac_session` fixture.
+
+ This function uses the `MagicMock` class from the `unittest.mock` module to create a mock
+ object.
+ The mock object is then returned by the function.
+
+ Returns:
+ MagicMock: A mock object representing the `idrac_session`.
+ """
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_session_mock(self, mocker, idrac_session_mock):
+ """
+ Returns a mocked instance of the SessionAPI class from the specified module path.
+ The mocked instance is created using the `mocker.patch` function. The `idrac_session_mock`
+ parameter is passed as the return value of the mocked instance. The `__enter__` method
+ of the mocked instance is also mocked to return the `idrac_session_mock`.
+ :param mocker: The mocker fixture provided by the pytest framework.
+ :type mocker: _pytest.monkeypatch.MonkeyPatch
+ :param idrac_session_mock: The mocked instance of the idrac session.
+ :type idrac_session_mock: Any
+ :return: The mocked instance of the SessionAPI class.
+ :rtype: MagicMock
+ """
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'SessionAPI',
+ return_value=idrac_session_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_session_mock
+ return idrac_conn_mock
+
+ def test_delete_session_success_check_mode_changes(self, idrac_connection_session_mock):
+ """
+ Test the `delete_session_success_check_mode_changes` method of the `DeleteSession` class.
+
+ This method is responsible for testing the success case when the `delete_session` method
+ is called in check mode.
+ It verifies that the `exit_json` method of the `f_module` object is called with the
+ appropriate arguments when the session is successfully deleted.
+
+ Parameters:
+ - idrac_connection_session_mock (MagicMock): A mock object representing the
+ `idrac_connection_session` object.
+
+ Returns:
+ None
+ """
+ f_module = MagicMock()
+ delete_session_obj = idrac_session.DeleteSession(idrac_connection_session_mock, f_module)
+ delete_session_obj.idrac.invoke_request.return_value.status_code = 200
+ delete_session_obj.execute()
+ f_module.exit_json.assert_called_once_with(msg=CHANGES_FOUND_MSG, changed=True)
+
+ def test_delete_session_success_check_mode_no_changes(self, idrac_connection_session_mock):
+ """
+ Test the success case of deleting a session in check mode when no changes are expected.
+
+ Args:
+ idrac_connection_session_mock (MagicMock): A mock object representing the IDRAC
+ connection session.
+
+ This function tests the scenario where the deletion of a session is successful in check
+ mode and no changes are expected. It sets up the necessary mock objects and asserts that
+ the `exit_json` method of the `f_module` object is called once with the `msg` parameter
+ set to `NO_CHANGES_FOUND_MSG`.
+
+ Returns:
+ None
+ """
+ f_module = MagicMock()
+ delete_session_obj = idrac_session.DeleteSession(idrac_connection_session_mock, f_module)
+ delete_session_obj.idrac.invoke_request.return_value.status_code = 201
+ delete_session_obj.execute()
+ f_module.exit_json.assert_called_once_with(msg=NO_CHANGES_FOUND_MSG)
+
+ def test_delete_session_success(self, idrac_connection_session_mock):
+ """
+ Test the successful deletion of a session.
+
+ This test function verifies the behavior of the `DeleteSession` class when a session is
+ successfully deleted. It mocks the `idrac_connection_session_mock` object and sets up the
+ necessary parameters for the `f_module` object. It then creates an instance of the
+ `DeleteSession` class with the mocked `idrac_connection_session_mock` and the
+ `f_module` object.
+
+ The `get_session_url` method of the `session_obj` is mocked to return a specific session
+ URL. The `invoke_request` method of the `idrac` object of the `session_obj` is also mocked
+ to return a response with a status code of 200. The `exit_json` method of the `f_module`
+ object is mocked as well.
+
+ The `execute` method of the `session_obj` is called to execute the deletion of the session.
+ Finally, the `exit_json` method of the `f_module` object is asserted to have been called
+ with the expected arguments, including the success message and the changed flag set to
+ `True`.
+
+ Parameters:
+ - idrac_connection_session_mock (MagicMock): A mocked object representing the
+ `idrac_connection_session_mock` object.
+
+ Returns:
+ None
+ """
+ f_module = self.get_module_mock(
+ params={"session_id": "1234", "hostname": "X.X.X.X"}, check_mode=False)
+ session_obj = idrac_session.DeleteSession(idrac_connection_session_mock, f_module)
+ session_obj.get_session_url = MagicMock(return_value=SESSION_URL)
+ session_obj.idrac.invoke_request.return_value.status_code = 200
+ f_module.exit_json = MagicMock()
+ session_obj.execute()
+ f_module.exit_json.assert_called_once_with(msg=DELETE_SUCCESS_MSG, changed=True)
+
+ def test_delete_session_check_mode_false_no_changes(self, idrac_connection_session_mock):
+ """
+ Test the scenario where the delete session is executed in check mode with `check_mode` set
+ to False and no changes are expected.
+
+ Args:
+ idrac_connection_session_mock (MagicMock): A mock object representing the IDRAC
+ connection session.
+
+ Returns:
+ None
+
+ This function creates a mock module object with the specified parameters and
+ initializes the `DeleteSession` object with the mock IDRAC connection and module. It sets
+ the `get_session_url` method of the session object to return a specific session URL. It
+ sets the status code of the invoke request to 201. It then asserts that the `exit_json`
+ method of the module object is called once with the `msg` parameter set to the
+ `NO_CHANGES_FOUND_MSG` constant.
+ """
+ f_module = self.get_module_mock(
+ params={"session_id": "1234", "hostname": "X.X.X.X"}, check_mode=False)
+ session_obj = idrac_session.DeleteSession(idrac_connection_session_mock, f_module)
+ session_obj.get_session_url = MagicMock(return_value=SESSION_URL)
+ session_obj.idrac.invoke_request.return_value.status_code = 201
+ f_module.exit_json = MagicMock()
+ session_obj.execute()
+ f_module.exit_json.assert_called_once_with(msg=NO_CHANGES_FOUND_MSG)
+
+ def test_delete_session_http_error(self, idrac_connection_session_mock):
+ """
+ Test the behavior of the `DeleteSession` class when an HTTP error occurs during the
+ deletion of a session.
+
+ This test case creates a mock `f_module` object with the necessary parameters and
+ initializes a `DeleteSession` object with the mock `idrac_connection_session_mock` and the
+ `f_module` object. It then sets up the necessary mock functions and side effects to
+ simulate an HTTP error during the deletion of a session. Finally, it executes the
+ `execute()` method of the `DeleteSession` object and asserts that an
+ `AnsibleFailJSonException` is raised with the expected failure message and error
+ information.
+
+ Parameters:
+ - idrac_connection_session_mock (MagicMock): A mock object representing the
+ `idrac_connection_session_mock` parameter.
+
+ Raises:
+ - AssertionError: If the expected failure message or error information is not present
+ in the raised exception.
+
+ Returns:
+ None
+ """
+ f_module = self.get_module_mock(
+ params={"session_id": "1234", "hostname": "X.X.X.X"}, check_mode=False)
+ session_obj = idrac_session.DeleteSession(idrac_connection_session_mock, f_module)
+ session_obj.get_session_url = MagicMock(return_value=SESSION_URL)
+ session_obj.get_session_status = MagicMock(return_value=200)
+ json_str = to_text(json.dumps({"data": "out"}))
+ session_obj.idrac.invoke_request.side_effect = HTTPError(HTTPS_PATH, 200,
+ HTTP_ERROR,
+ {"accept-type": APPLICATION_JSON},
+ StringIO(json_str))
+ try:
+ session_obj.execute()
+ except AnsibleFailJSonException as ex:
+ assert ex.fail_msg == "Unable to 'delete' a session."
+ assert ex.fail_kwargs == {'error_info': {'data': 'out'}, 'failed': True}
+
+
+class TestMain(FakeAnsibleModule):
+ """
+ Class for testing the main.
+ """
+ module = idrac_session
+
+ @pytest.fixture
+ def idrac_session_mock(self):
+ """
+ Creates a mock object for the `idrac_session` fixture.
+
+ This function uses the `MagicMock` class from the `unittest.mock` module to create a mock
+ object.
+ The mock object is then returned by the function.
+
+ Returns:
+ MagicMock: A mock object representing the `idrac_session`.
+ """
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_session_mock(self, mocker, idrac_session_mock):
+ """
+ Returns a mock object for the `SessionAPI` class from the `MODULE_PATH` module.
+ The mock object is initialized with the `idrac_session_mock` as the return value.
+ The `__enter__` method of the mock object is also mocked to return `idrac_session_mock`.
+
+ :param mocker: The pytest fixture for mocking objects.
+ :type mocker: pytest_mock.plugin.MockerFixture
+ :param idrac_session_mock: The mock object for the `idrac_session_mock`.
+ :type idrac_session_mock: Any
+ :return: The mock object for the `SessionAPI` class.
+ :rtype: MagicMock
+ """
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'SessionAPI',
+ return_value=idrac_session_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_session_mock
+ return idrac_conn_mock
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError,
+ TypeError, ValueError])
+ def test_idrac_session_main_exception_handling_case(self, exc_type, ome_default_args, mocker):
+ """
+ Test the exception handling of the `idrac_session_main` module.
+
+ This function tests the exception handling of the `idrac_session_main` module by mocking
+ different exceptions and verifying the expected behavior.
+
+ Parameters:
+ - exc_type (Exception): The type of exception to be raised.
+ - ome_default_args (dict): The default arguments for the module.
+ - mocker (MockerFixture): The mocker fixture for mocking functions.
+
+ Returns:
+ None
+
+ Raises:
+ AssertionError: If the expected result does not match the actual result.
+
+ Notes:
+ - The function uses the `pytest.mark.parametrize` decorator to parameterize the test
+ cases.
+ - The `exc_type` parameter represents the type of exception to be raised.
+ - The `ome_default_args` parameter contains the default arguments for the module.
+ - The `mocker` parameter is used to mock functions and simulate different exceptions.
+ - The function calls the `_run_module` method with the `ome_default_args` to execute
+ the module.
+ - The function verifies the expected result based on the raised exception type.
+
+ """
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + "CreateSession.get_session_url",
+ side_effect=exc_type(HTTPS_PATH, 400,
+ HTTP_ERROR,
+ {"accept-type": APPLICATION_JSON},
+ StringIO(json_str)))
+ else:
+ ome_default_args.update({"state": "absent", "session_id": "1234",
+ "auth_token": "token123"})
+ mocker.patch(MODULE_PATH + "DeleteSession.get_session_url",
+ side_effect=exc_type('test'))
+ result = self._run_module(ome_default_args)
+ if exc_type == URLError:
+ assert result['unreachable'] is True
+ else:
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_storage_volume.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_storage_volume.py
new file mode 100644
index 000000000..3cdf742d2
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_storage_volume.py
@@ -0,0 +1,1178 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 9.0.0
+# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import absolute_import, division, print_function
+
+import json
+import pytest
+from io import StringIO
+from ansible.module_utils._text import to_text
+from urllib.error import HTTPError, URLError
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible_collections.dellemc.openmanage.plugins.modules import idrac_storage_volume
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from mock import MagicMock
+from copy import deepcopy
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.idrac_storage_volume.'
+MODULE_UTILS_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.utils.'
+
# --- Redfish URIs used by the module under test ---
SYSTEMS_URI = "/redfish/v1/Systems"
iDRAC_JOB_URI = "/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/{job_id}"
# --- Expected module messages; these must match the idrac_storage_volume module verbatim ---
CONTROLLER_NOT_EXIST_ERROR = "Specified Controller {controller_id} does not exist in the System."
CONTROLLER_NOT_DEFINED = "Controller ID is required."
SUCCESSFUL_OPERATION_MSG = "Successfully completed the {operation} storage volume operation."
DRIVES_NOT_EXIST_ERROR = "No Drive(s) are attached to the specified Controller Id: {controller_id}."
DRIVES_NOT_MATCHED = "Following Drive(s) {specified_drives} are not attached to the specified Controller Id: {controller_id}."
NEGATIVE_OR_ZERO_MSG = "The value for the `{parameter}` parameter cannot be negative or zero."
NEGATIVE_MSG = "The value for the `{parameter}` parameter cannot be negative."
INVALID_VALUE_MSG = "The value for the `{parameter}` parameter is invalid."
ID_AND_LOCATION_BOTH_DEFINED = "Either id or location is allowed."
ID_AND_LOCATION_BOTH_NOT_DEFINED = "Either id or location should be specified."
DRIVES_NOT_DEFINED = "Drives must be defined for volume creation."
NOT_ENOUGH_DRIVES = "Number of sufficient disks not found in Controller '{controller_id}'!"
WAIT_TIMEOUT_MSG = "The job is not complete after {0} seconds."
JOB_TRIGERRED = "Successfully triggered the {0} storage volume operation."
VOLUME_NAME_REQUIRED_FOR_DELETE = "Virtual disk name is a required parameter for remove virtual disk operations."
VOLUME_NOT_FOUND = "Unable to find the virtual disk."
CHANGES_NOT_FOUND = "No changes found to commit!"
CHANGES_FOUND = "Changes found to commit!"
ODATA_ID = "@odata.id"
ODATA_REGEX = "(.*?)@odata"
ATTRIBUTE = "</Attribute>"
VIEW_OPERATION_FAILED = "Failed to fetch storage details."
VIEW_CONTROLLER_DETAILS_NOT_FOUND = "Failed to find the controller {controller_id}."
VIEW_OPERATION_CONTROLLER_NOT_SPECIFIED = "Controller identifier parameter is missing."
VIEW_VIRTUAL_DISK_DETAILS_NOT_FOUND = "Failed to find the volume : {volume_id} in controller : {controller_id}."
SUCCESS_STATUS = "Success"
FAILED_STATUS = "Failed"
# --- iDRAC hardware identifiers shared by the fixture data below ---
CONTROLLER_BATTERY = "Battery.Integrated.1:RAID.SL.5-1"
CONTROLLER_ID_FIRST = "AHCI.Embedded.1-1"
CONTROLLER_ID_SECOND = "AHCI.Embedded.1-2"
CONTROLLER_ID_THIRD = "AHCI.Embedded.1-3"
CONTROLLER_ID_FOURTH = "RAID.SL.5-1"
CONTROLLER_ID_FIFTH = "RAID.SL.5-3"
SYSTEM = 'System.Embedded.1'
ENCLOSURE_ID = 'Enclosure.Internal.0-1:RAID.SL.5-1'
PHYSICAL_DISK_FIRST = 'Disk.Bay.0:Enclosure.Internal.0-1:RAID.SL.5-1'
PHYSICAL_DISK_SECOND = 'Disk.Bay.0:Enclosure.Internal.0-1:RAID.SL.5-3'
VIRTUAL_DISK_FIRST = 'Disk.Virtual.0:RAID.SL.5-1'
VIRTUAL_DISK_SECOND = 'Disk.Virtual.1:RAID.SL.5-1'
# --- Dotted attribute paths handed to mocker.patch, and misc shared literals ---
ALL_STORAGE_DATA_METHOD = "StorageData.all_storage_data"
FETCH_STORAGE_DATA_METHOD = "StorageData.fetch_storage_data"
FILTER_DISK = 'StorageCreate.filter_disk'
VIEW_EXECUTE = 'StorageView.execute'
DATA_XML = '<Data></Data>'
REDFISH = "/redfish/v1"
API_INVOKE_MOCKER = "iDRACRedfishAPI.invoke_request"
BASE_IDRAC_API = "/redfish/v1/Chassis/System.Embedded.1"
+
+
class TestStorageData(FakeAnsibleModule):
    """Tests for the StorageData helper of the idrac_storage_volume module.

    The class attributes are canned Redfish responses (inputs) and the
    transformed structures the module is expected to produce (outputs);
    assertions compare against them verbatim, so their content must not drift.
    """

    module = idrac_storage_volume
    # Raw response of the system's top-level Storage collection URI.
    storage_controllers = {
        ODATA_ID: "/redfish/v1/Systems/System.Embedded.1/Storage"
    }
    # "Members" payload of a controller's Volumes collection.
    volumes_list = [
        {
            ODATA_ID: "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.SL.5-1/Volumes/Disk.Virtual.0:RAID.SL.5-1"
        },
        {
            ODATA_ID: "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.SL.5-1/Volumes/Disk.Virtual.1:RAID.SL.5-1"
        }]
    # Storage collection with three members: a RAID controller, a CPU-attached
    # entry (no "Controllers" key, so it must be skipped), and an AHCI controller.
    controllers_list = {
        "Members": [
            {
                "Controllers": {
                    ODATA_ID: "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.SL.5-1/Controllers"
                },
                "Drives": [
                    {
                        ODATA_ID: "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.SL.5-1/Drives/Disk.Bay.0:Enclosure.Internal.0-1:RAID.SL.5-1"
                    }
                ],
                "Id": CONTROLLER_ID_FOURTH,
                "Links": {
                    "Enclosures": [
                        {
                            ODATA_ID: "/redfish/v1/Chassis/Enclosure.Internal.0-1:RAID.SL.5-1"
                        }
                    ]
                },
                "Volumes": {
                    ODATA_ID: "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.SL.5-1/Volumes"
                },
                "Oem": {
                    "Dell": {
                        "DellControllerBattery": {
                            "Id": CONTROLLER_BATTERY
                        }}
                }
            },
            {
                "Drives": [
                    {
                        ODATA_ID: "/redfish/v1/Systems/System.Embedded.1/Storage/CPU.1/Drives/Disk.Bay.23:Enclosure.Internal.0-3"
                    }
                ],
                "Drives@odata.count": 1,
                "Id": "CPU.1",
                "Links": {
                    "Enclosures": [
                        {
                            ODATA_ID: "/redfish/v1/Chassis/Enclosure.Internal.0-3"
                        }
                    ],
                },
                "Volumes": {
                    ODATA_ID: "/redfish/v1/Systems/System.Embedded.1/Storage/CPU.1/Volumes"
                }
            },
            {
                "Controllers": {
                    ODATA_ID: "/redfish/v1/Systems/System.Embedded.1/Storage/AHCI.Embedded.1-1/Controllers"
                },
                "Drives": [],
                "Id": CONTROLLER_ID_FIRST,
                "Links": {
                    "Enclosures": [
                        {
                            ODATA_ID: BASE_IDRAC_API
                        }
                    ]
                },
                "Volumes": {
                    ODATA_ID: "/redfish/v1/Systems/System.Embedded.1/Storage/AHCI.Embedded.1-1/Volumes"
                }
            }
        ]
    }

    # Aggregated output shape of StorageData.all_storage_data, used as the
    # input for fetch_storage_data tests (iDRAC9-style firmware).
    storage_data = {
        'Controllers': {
            CONTROLLER_ID_FIRST: {
                'Controllers': {
                    ODATA_ID: '/redfish/v1/Systems/System.Embedded.1/Storage/AHCI.Embedded.1-1/Controllers',
                },
                'Drives': {},
                'Id': CONTROLLER_ID_FIRST,
                'Links': {
                    'Enclosures': {
                        SYSTEM: BASE_IDRAC_API,
                    },
                },
                'Volumes': {},
                "Oem": {
                    "Dell": {
                        "CPUAffinity": []
                    }
                }
            },
            CONTROLLER_ID_SECOND: {
                'Controllers': {
                    ODATA_ID: '/redfish/v1/Systems/System.Embedded.1/Storage/AHCI.Embedded.1-2/Controllers',
                },
                'Drives': {
                    'Disk.Bay.0:Enclosure.Internal.0-1:AHCI.Embedded.1-2': '/redfish/v1/\
Systems/System.Embedded.1/Storage/RAID.SL.5-1/Drives/Disk.Bay.0:Enclosure.Internal.0-1:RAID.SL.5-1',
                },
                'Id': CONTROLLER_ID_SECOND,
                'Links': {
                    'Enclosures': {
                        SYSTEM: BASE_IDRAC_API,
                    },
                },
                'Volumes': {},
                "Oem": {
                    "Dell": {
                        "CPUAffinity": []
                    }
                }
            },
            CONTROLLER_ID_THIRD: {
                'Controllers': {
                    ODATA_ID: '/redfish/v1/Systems/System.Embedded.1/Storage/AHCI.Embedded.1-2/Controllers',
                },
                'Drives': {
                    'Disk.Bay.0:Enclosure.Internal.0-1:AHCI.Embedded.1-3': '/redfish/v1/\
Systems/System.Embedded.1/Storage/AHCI.Embedded.1-3/Drives/Disk.Bay.0:Enclosure.Internal.0-1:AHCI.Embedded.1-3',
                },
                'Id': CONTROLLER_ID_THIRD,
                'Links': {
                    'Enclosures': {
                        ENCLOSURE_ID: {
                            "Links": {
                                "Drives": []
                            }
                        },
                    },
                },
                'Volumes': {},
                "Oem": {
                    "Dell": {
                        "CPUAffinity": []
                    }
                }
            },
            CONTROLLER_ID_FOURTH: {
                'Controllers': {
                    ODATA_ID: '/redfish/v1/Systems/System.Embedded.1/Storage/RAID.SL.5-1/Controllers',
                },
                'Drives': {
                    PHYSICAL_DISK_FIRST: '/redfish/v1/Systems\
/System.Embedded.1/Storage/RAID.SL.5-1/Drives/Disk.Bay.0:Enclosure.Internal.0-1:RAID.SL.5-1',
                },
                'Id': CONTROLLER_ID_FOURTH,
                'Links': {
                    'Enclosures': {
                        ENCLOSURE_ID: {"Links": {
                            "Drives": [
                                {
                                    ODATA_ID: PHYSICAL_DISK_FIRST
                                }
                            ]}}
                    },
                },
                'Volumes': {
                    VIRTUAL_DISK_FIRST: {
                        "Links": {
                            "Drives": [
                                {
                                    ODATA_ID: PHYSICAL_DISK_FIRST
                                }
                            ]
                        },
                    },
                    VIRTUAL_DISK_SECOND: {
                        "Links": {
                            "Drives": [
                                {
                                    ODATA_ID: PHYSICAL_DISK_FIRST
                                }
                            ]
                        },
                    },
                },
                "Oem": {
                    "Dell": {
                        "DellControllerBattery": {
                            "Id": CONTROLLER_BATTERY
                        }}
                }
            }
        }
    }

    # Expected fetch_storage_data output for `storage_data` on iDRAC9 firmware.
    storage_data_expected = {
        'Controller': {
            CONTROLLER_ID_FIRST: {
                'ControllerSensor': {
                    CONTROLLER_ID_FIRST: {},
                },
            },
            CONTROLLER_ID_SECOND: {
                'ControllerSensor': {
                    CONTROLLER_ID_SECOND: {},
                },
                'PhysicalDisk': [
                    'Disk.Bay.0:Enclosure.Internal.0-1:AHCI.Embedded.1-2',
                ],
            },
            CONTROLLER_ID_THIRD: {
                'ControllerSensor': {
                    CONTROLLER_ID_THIRD: {}
                },
                'Enclosure': {
                    ENCLOSURE_ID: {
                        'EnclosureSensor': {
                            ENCLOSURE_ID: {},
                        },
                    },
                },
            },
            CONTROLLER_ID_FOURTH: {
                'ControllerSensor': {
                    CONTROLLER_ID_FOURTH: {
                        'ControllerBattery': [
                            'Battery.Integrated.1:RAID.SL.5-1',
                        ],
                    },
                },
                'Enclosure': {
                    ENCLOSURE_ID: {
                        'EnclosureSensor': {
                            ENCLOSURE_ID: {},
                        },
                        'PhysicalDisk': [
                            PHYSICAL_DISK_FIRST,
                        ],
                    },
                },
                'VirtualDisk': {
                    VIRTUAL_DISK_FIRST: {
                        'PhysicalDisk': [
                            PHYSICAL_DISK_FIRST,
                        ],
                    },
                    VIRTUAL_DISK_SECOND: {
                        'PhysicalDisk': [
                            PHYSICAL_DISK_FIRST,
                        ],
                    },
                },
            },
        }
    }

    # all_storage_data-shaped input for the iDRAC8 firmware branch.
    storage_data_idrac8 = {
        'Controllers': {
            CONTROLLER_ID_FIFTH: {
                'Controllers': {
                    ODATA_ID: '/redfish/v1/Systems/System.Embedded.1/Storage/RAID.SL.5-3/Controllers',
                },
                'Drives': {
                    PHYSICAL_DISK_SECOND: '/redfish/v1/Systems\
/System.Embedded.1/Storage/RAID.SL.5-3/Drives/Disk.Bay.0:Enclosure.Internal.0-1:RAID.SL.5-3',
                },
                'Id': CONTROLLER_ID_FIFTH,
                'Links': {
                    'Enclosures': {
                        ENCLOSURE_ID: {"Links": {
                            "Drives": [
                                {
                                    ODATA_ID: PHYSICAL_DISK_SECOND
                                }
                            ]}}
                    },
                },
                'Volumes': {
                    'Disk.Virtual.0:RAID.SL.5-3': {
                        "Links": {
                            "Drives": [
                                {
                                    ODATA_ID: PHYSICAL_DISK_SECOND
                                }
                            ]
                        },
                    },
                    'Disk.Virtual.1:RAID.SL.5-3': {
                        "Links": {
                            "Drives": [
                                {
                                    ODATA_ID: PHYSICAL_DISK_SECOND
                                }
                            ]
                        },
                    },
                },
                "Oem": {
                    "Dell": {
                        "DellControllerBattery": {
                            "Id": "Battery.Integrated.1:RAID.SL.5-3"
                        }}
                }
            }
        }
    }

    # Expected fetch_storage_data output for `storage_data_idrac8`; note the
    # iDRAC8 branch does not emit the ControllerBattery entry.
    storage_data_expected_idrac8 = {
        'Controller': {
            CONTROLLER_ID_FIFTH: {
                'ControllerSensor': {
                    CONTROLLER_ID_FIFTH: {},
                },
                'Enclosure': {
                    ENCLOSURE_ID: {
                        'EnclosureSensor': {
                            ENCLOSURE_ID: {},
                        },
                        'PhysicalDisk': [
                            PHYSICAL_DISK_SECOND,
                        ],
                    },
                },
                'VirtualDisk': {
                    'Disk.Virtual.0:RAID.SL.5-3': {
                        'PhysicalDisk': [
                            PHYSICAL_DISK_SECOND,
                        ],
                    },
                    'Disk.Virtual.1:RAID.SL.5-3': {
                        'PhysicalDisk': [
                            PHYSICAL_DISK_SECOND,
                        ],
                    },
                },
            },
        }
    }

    @pytest.fixture
    def idrac_storage_volume_mock(self):
        # Bare MagicMock standing in for an open iDRAC Redfish connection.
        idrac_obj = MagicMock()
        return idrac_obj

    @pytest.fixture
    def idrac_connection_storage_volume_mock(self, mocker, idrac_storage_volume_mock):
        # Patch the module's iDRACRedfishAPI so both direct construction and
        # context-manager entry hand back the same mock connection.
        idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
                                       return_value=idrac_storage_volume_mock)
        idrac_conn_mock.return_value.__enter__.return_value = idrac_storage_volume_mock
        return idrac_conn_mock

    def test_fetch_controllers_uri(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """fetch_controllers_uri returns the Storage URI on success and fails
        with the resource-lookup error message otherwise."""
        def mock_get_dynamic_uri_request(*args, **kwargs):
            return self.storage_controllers

        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
                     return_value=(SYSTEM, ''))
        mocker.patch(MODULE_PATH + "get_dynamic_uri",
                     side_effect=mock_get_dynamic_uri_request)
        f_module = self.get_module_mock(
            params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageData(
            idrac_connection_storage_volume_mock, f_module)
        data = idr_obj.fetch_controllers_uri()
        assert data == self.storage_controllers

        # Scenario 2: for error message
        mocker.patch(MODULE_PATH + "validate_and_get_first_resource_id_uri",
                     return_value=(REDFISH, "Error"))
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        idr_obj = self.module.StorageData(
            idrac_connection_storage_volume_mock, f_module)
        with pytest.raises(Exception) as exc:
            idr_obj.fetch_controllers_uri()
        assert exc.value.args[0] == "Error"

    def test_fetch_api_data(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """fetch_api_data returns the URI's last path segment as key plus the
        raw invoke_request response."""
        key = "Storage"
        obj = MagicMock()
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER, return_value=obj)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        idr_obj = self.module.StorageData(idrac_connection_storage_volume_mock, f_module)
        key_out, uri_data_out = idr_obj.fetch_api_data(self.storage_controllers[ODATA_ID], -1)
        assert key == key_out
        assert obj == uri_data_out

    def test_all_storage_data(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """all_storage_data aggregates only members exposing a Controllers
        entry (CPU.1 has none and must be dropped)."""
        def mock_get_dynamic_uri_request(*args, **kwargs):
            # Volumes collections are fetched with an explicit "Members" key.
            if len(args) == 3 and args[2] == "Members":
                return self.volumes_list
            else:
                return self.controllers_list
        mocker.patch(MODULE_PATH + "StorageData.fetch_controllers_uri",
                     return_value=self.storage_controllers)
        mocker.patch(MODULE_PATH + "get_dynamic_uri",
                     side_effect=mock_get_dynamic_uri_request)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        idr_obj = self.module.StorageData(idrac_connection_storage_volume_mock, f_module)
        storage_info = idr_obj.all_storage_data()
        assert set(storage_info.keys()) == {'Controllers'}
        assert set(storage_info["Controllers"].keys()) == {CONTROLLER_ID_FIRST, CONTROLLER_ID_FOURTH}

    def test_fetch_storage_data(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """fetch_storage_data output differs by firmware generation: iDRAC9
        (3.x) includes controller battery data, iDRAC8 (2.x) does not."""
        mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD,
                     return_value=self.storage_data)
        mocker.patch(MODULE_PATH + "get_idrac_firmware_version",
                     return_value="3.20.00")
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        idr_obj = self.module.StorageData(idrac_connection_storage_volume_mock, f_module)
        storage_info = idr_obj.fetch_storage_data()
        assert storage_info == self.storage_data_expected

        # Scenario - for idrac 8
        mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD,
                     return_value=self.storage_data_idrac8)
        mocker.patch(MODULE_PATH + "get_idrac_firmware_version",
                     return_value="2.00")
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        idr_obj = self.module.StorageData(idrac_connection_storage_volume_mock, f_module)
        storage_info = idr_obj.fetch_storage_data()
        assert storage_info == self.storage_data_expected_idrac8
+
+
class TestStorageView(TestStorageData):
    """Tests for the StorageView (state=view) operation of idrac_storage_volume.

    The iDRAC connection fixtures (``idrac_storage_volume_mock`` and
    ``idrac_connection_storage_volume_mock``) are inherited from
    ``TestStorageData``; they were previously redeclared here byte-for-byte
    and that dead duplication has been removed.
    """

    module = idrac_storage_volume

    def test_execute(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """StorageView.execute succeeds for valid controller/volume filters and
        raises VIEW_OPERATION_FAILED for unknown identifiers."""
        mocker.patch(MODULE_PATH + FETCH_STORAGE_DATA_METHOD,
                     return_value=TestStorageData.storage_data_expected)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
        idr_obj = self.module.StorageView(idrac_connection_storage_volume_mock, f_module)
        out = idr_obj.execute()
        assert out == {"Message": TestStorageData.storage_data_expected, "Status": SUCCESS_STATUS}

        # Scenario - When controller_id is passed
        data_when_controller_id_passed = deepcopy(TestStorageData.storage_data_expected)
        mocker.patch(MODULE_PATH + FETCH_STORAGE_DATA_METHOD,
                     return_value=data_when_controller_id_passed)
        idrac_default_args.update({"controller_id": CONTROLLER_ID_FIRST})
        out = idr_obj.execute()
        assert out == {"Message": data_when_controller_id_passed, "Status": SUCCESS_STATUS}

        # Scenario - When invalid controller_id is passed
        data_when_invlid_controller_id_passed = deepcopy(TestStorageData.storage_data_expected)
        mocker.patch(MODULE_PATH + FETCH_STORAGE_DATA_METHOD,
                     return_value=data_when_invlid_controller_id_passed)
        controller_id = "AHCI.Embedded.1-invalid"
        idrac_default_args.update({"controller_id": controller_id})
        with pytest.raises(Exception) as exc:
            idr_obj.execute()
        assert exc.value.args[0] == VIEW_OPERATION_FAILED

        # Scenario - When volume_id and invalid controller_id is passed
        data_when_invlid_volume_id_passed = deepcopy(TestStorageData.storage_data_expected)
        mocker.patch(MODULE_PATH + FETCH_STORAGE_DATA_METHOD,
                     return_value=data_when_invlid_volume_id_passed)
        idrac_default_args.update({"volume_id": VIRTUAL_DISK_FIRST})
        with pytest.raises(Exception) as exc:
            idr_obj.execute()
        assert exc.value.args[0] == VIEW_OPERATION_FAILED

        # Scenario - When volume_id and valid controller_id is passed
        data_when_controller_id_and_volume_id_passed = deepcopy(TestStorageData.storage_data_expected)
        mocker.patch(MODULE_PATH + FETCH_STORAGE_DATA_METHOD,
                     return_value=data_when_controller_id_and_volume_id_passed)
        idrac_default_args.update({"controller_id": CONTROLLER_ID_FOURTH, "volume_id": VIRTUAL_DISK_FIRST})
        out = idr_obj.execute()
        assert out == {"Message": data_when_controller_id_and_volume_id_passed, "Status": SUCCESS_STATUS}

        # Scenario - When invalid volume_id and valid controller_id is passed
        # (CONTROLLER_ID_FIRST has no VirtualDisk entry in the fixture data).
        data_when_controller_id_and_volume_id_passed = deepcopy(TestStorageData.storage_data_expected)
        mocker.patch(MODULE_PATH + FETCH_STORAGE_DATA_METHOD,
                     return_value=data_when_controller_id_and_volume_id_passed)
        idrac_default_args.update({"controller_id": CONTROLLER_ID_FIRST, "volume_id": VIRTUAL_DISK_FIRST})
        with pytest.raises(Exception) as exc:
            idr_obj.execute()
        assert exc.value.args[0] == VIEW_OPERATION_FAILED

        # Scenario - When volume_id is passed without any controller_id
        data_when_volume_id_passed = deepcopy(TestStorageData.storage_data_expected)
        mocker.patch(MODULE_PATH + FETCH_STORAGE_DATA_METHOD,
                     return_value=data_when_volume_id_passed)
        del idrac_default_args["controller_id"]
        idrac_default_args.update({"volume_id": VIRTUAL_DISK_FIRST})
        with pytest.raises(Exception) as exc:
            idr_obj.execute()
        assert exc.value.args[0] == VIEW_OPERATION_FAILED
+
+
class TestStorageBase(FakeAnsibleModule):
    """Tests for the StorageBase helper: input extension, XML payload
    construction, and job-completion waiting."""

    module = idrac_storage_volume

    @pytest.fixture
    def idrac_storage_volume_mock(self):
        # Bare MagicMock standing in for an open iDRAC Redfish connection.
        idrac_obj = MagicMock()
        return idrac_obj

    @pytest.fixture
    def idrac_connection_storage_volume_mock(self, mocker, idrac_storage_volume_mock):
        # Patch the module's iDRACRedfishAPI so both direct construction and
        # context-manager entry hand back the same mock connection.
        idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
                                       return_value=idrac_storage_volume_mock)
        idrac_conn_mock.return_value.__enter__.return_value = idrac_storage_volume_mock
        return idrac_conn_mock

    def test_module_extend_input(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """module_extend_input fills a default volume entry when none is given
        and normalizes string span values to integers."""
        mocker.patch(MODULE_PATH + 'StorageBase.data_conversion', return_value={})
        idrac_default_args.update({'span_length': 1, 'span_depth': 1})
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageBase(idrac_connection_storage_volume_mock, f_module)
        data = idr_obj.module_extend_input(f_module)
        # Scenario 1: when volumes is None
        assert data['volumes'] == [{'drives': {'id': [-1]}}]

        # Scenario 2: when volumes is given
        idrac_default_args.update({'volumes': [{"drives": {'location': [3]}, 'span_length': '1'}]})
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        mocker.patch(MODULE_PATH + 'StorageBase.data_conversion', return_value={"drives": {'location': [3]}, 'span_length': '1'})

        idr_obj = self.module.StorageBase(idrac_connection_storage_volume_mock, f_module)
        data = idr_obj.module_extend_input(f_module)
        # 'span_length' is coerced from the string '1' to the int 1.
        assert data['volumes'] == [{"drives": {'location': [3]}, 'span_length': 1}]

    def test_payload_for_disk(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """payload_for_disk renders one XML Attribute element per disk id /
        dedicated hot spare."""
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageBase(idrac_connection_storage_volume_mock, f_module)
        # Scenario 1: When drives is given
        data = idr_obj.payload_for_disk({'drives': {'id': [1, 2]}})
        assert data == '<Attribute Name="IncludedPhysicalDiskID">1</Attribute><Attribute Name="IncludedPhysicalDiskID">2</Attribute>'

        # Scenario 2: When dedicate_hot_spare is in each_volume
        data = idr_obj.payload_for_disk({'dedicated_hot_spare': [3, 5]})
        assert data == '<Attribute Name="RAIDdedicatedSpare">3</Attribute><Attribute Name="RAIDdedicatedSpare">5</Attribute>'

    def test_construct_volume_payloadk(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """construct_volume_payload delegates to xml_data_conversion (mocked
        here) for the create state."""
        mocker.patch(MODULE_PATH + 'xml_data_conversion', return_value=DATA_XML)
        mocker.patch(MODULE_PATH + 'StorageBase.payload_for_disk', return_value='payload_detail_in_xml')
        # Scenario 1: When state is create
        idrac_default_args.update({'state': 'create'})
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageBase(idrac_connection_storage_volume_mock, f_module)
        data = idr_obj.construct_volume_payload(1, {})
        assert data == DATA_XML

    def test_constuct_payload(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """constuct_payload builds the full XML payload with and without
        raid_reset_config set."""
        mocker.patch(MODULE_PATH + 'xml_data_conversion', return_value=DATA_XML)
        mocker.patch(MODULE_PATH + 'StorageBase.construct_volume_payload', return_value='<Volume></Volume>')
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageBase(idrac_connection_storage_volume_mock, f_module)
        # Scenario 1: Default
        data = idr_obj.constuct_payload({})
        assert data == DATA_XML

        # Scenario 2: When raid_reset_config is 'true'
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageBase(idrac_connection_storage_volume_mock, f_module)
        idr_obj.module_ext_params.update({'raid_reset_config': 'true'})
        data = idr_obj.constuct_payload({})
        assert data == DATA_XML

    def test_wait_for_job_completion(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """wait_for_job_completion covers: timeout reached, success within the
        timeout, job failure, and fire-and-forget (job_wait=False)."""
        obj = MagicMock()
        obj.headers = {'Location': "/joburl/JID12345"}
        job = {"job_wait": True, "job_wait_timeout": 1200}
        idrac_default_args.update(job)
        job_resp_completed = {'JobStatus': 'Completed'}
        # Tuple shape: (failed, message, job_dict, elapsed_seconds).
        idrac_redfish_resp = (False, 'Job Success', job_resp_completed, 1200)
        mocker.patch(MODULE_PATH + 'idrac_redfish_job_tracking', return_value=idrac_redfish_resp)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageBase(idrac_connection_storage_volume_mock, f_module)
        # Scenario 1: Job_wait is True, job_wait_timeout match with default
        # (elapsed == timeout means the job did not finish in time).
        with pytest.raises(Exception) as exc:
            idr_obj.wait_for_job_completion(obj)
        assert exc.value.args[0] == WAIT_TIMEOUT_MSG.format(1200)

        # Scenario 2: Job_wait is True, job_wait_timeout less than default
        idrac_redfish_resp = (False, 'Job Success', job_resp_completed, 1000)
        mocker.patch(MODULE_PATH + 'idrac_redfish_job_tracking', return_value=idrac_redfish_resp)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageBase(idrac_connection_storage_volume_mock, f_module)
        data = idr_obj.wait_for_job_completion(obj)
        assert data == job_resp_completed

        # Scenario 3: Job failed in resp
        job_resp_failed = {'JobStatus': 'Failed', 'Message': 'Job failed.'}
        idrac_redfish_resp = (True, 'Job Failed', job_resp_failed, 1000)
        mocker.patch(MODULE_PATH + 'idrac_redfish_job_tracking', return_value=idrac_redfish_resp)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageBase(idrac_connection_storage_volume_mock, f_module)
        with pytest.raises(Exception) as exc:
            idr_obj.wait_for_job_completion(obj)
        assert exc.value.args[0] == 'Job failed.'

        # Scenario 4: Job wait is false; module exits with the triggered message.
        obj.json_data = {'JobStatus': 'Running'}
        mocker.patch(MODULE_PATH + API_INVOKE_MOCKER, return_value=obj)
        job = {"job_wait": False, "job_wait_timeout": 1200}
        idrac_default_args.update(job)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        f_module.params.update({'state': 'create'})
        idr_obj = self.module.StorageBase(idrac_connection_storage_volume_mock, f_module)
        with pytest.raises(Exception) as exc:
            idr_obj.wait_for_job_completion(obj)
        assert exc.value.args[0] == JOB_TRIGERRED.format('create')
+
+
class TestStorageValidation(TestStorageBase):
    """Tests for the StorageValidation helper: controller existence, negative
    parameter values, drive specification, and RAID-standard checks.

    The iDRAC connection fixtures are inherited from ``TestStorageBase``; they
    were previously redeclared here byte-for-byte and that dead duplication
    has been removed.
    """

    module = idrac_storage_volume

    def test_validate_controller_exists(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """validate_controller_exists fails when controller_id is missing or
        unknown, and passes silently for a known controller."""
        # Scenario - when controller_id is not passed
        mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD,
                     return_value=TestStorageData.storage_data)
        f_module = self.get_module_mock(
            params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageValidation(idrac_connection_storage_volume_mock, f_module)
        with pytest.raises(Exception) as exc:
            idr_obj.validate_controller_exists()
        assert exc.value.args[0] == CONTROLLER_NOT_DEFINED

        # Scenario - when invalid controller_id is passed
        controller_id = "AHCI.Embedded.1-invalid"
        idrac_default_args.update({"controller_id": controller_id})
        f_module = self.get_module_mock(
            params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageValidation(idrac_connection_storage_volume_mock, f_module)
        with pytest.raises(Exception) as exc:
            idr_obj.validate_controller_exists()
        assert exc.value.args[0] == CONTROLLER_NOT_EXIST_ERROR.format(controller_id=controller_id)

        # Scenario - when a valid controller_id is passed (no exception expected)
        controller_id = CONTROLLER_ID_FIRST
        idrac_default_args.update({"controller_id": controller_id})
        f_module = self.get_module_mock(
            params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageValidation(idrac_connection_storage_volume_mock, f_module)
        idr_obj.validate_controller_exists()

    def test_validate_job_wait_negative_values(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """validate_job_wait_negative_values rejects a non-positive
        job_wait_timeout when job_wait is enabled."""
        # Scenario - when job_wait_timeout is negative
        mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD,
                     return_value=TestStorageData.storage_data)
        idrac_default_args.update({"job_wait": True, "job_wait_timeout": -120})
        f_module = self.get_module_mock(
            params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageValidation(idrac_connection_storage_volume_mock, f_module)
        with pytest.raises(Exception) as exc:
            idr_obj.validate_job_wait_negative_values()
        assert exc.value.args[0] == NEGATIVE_OR_ZERO_MSG.format(parameter="job_wait_timeout")

        # Scenario - when job_wait_timeout is positive (no exception expected)
        idrac_default_args.update({"job_wait": True, "job_wait_timeout": 120})
        f_module = self.get_module_mock(
            params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageValidation(idrac_connection_storage_volume_mock, f_module)
        idr_obj.validate_job_wait_negative_values()

    @pytest.mark.parametrize("params", [
        {"span_depth": -1, "span_length": 2, "capacity": 200, "strip_size": 131072},
        {"span_depth": 1, "span_length": -1, "capacity": 200, "strip_size": 131072},
        {"span_depth": 1, "span_length": 2, "capacity": -1, "strip_size": 131072},
        {"span_depth": 1, "span_length": 2, "capacity": 200, "strip_size": -131072},
    ])
    def test_validate_negative_values_for_volume_params(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker, params):
        """Each parametrized payload carries exactly one negative volume
        parameter; the failure message must name that parameter."""
        mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD,
                     return_value=TestStorageData.storage_data)
        f_module = self.get_module_mock(
            params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageValidation(idrac_connection_storage_volume_mock, f_module)
        with pytest.raises(Exception) as exc:
            idr_obj.validate_negative_values_for_volume_params(params)
        # Derive the offending key from the payload itself so the expected
        # message tracks the parametrized cases automatically.
        negative_key = next((k for k, v in params.items() if v < 0), None)
        assert exc.value.args[0] == NEGATIVE_OR_ZERO_MSG.format(parameter=negative_key)

    def test_validate_negative_values_for_volume_params_with_different_parameter(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """number_dedicated_hot_spare may be zero but not negative; unrelated
        parameters are ignored."""
        # Scenario - passing different parameter (no exception expected)
        mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD,
                     return_value=TestStorageData.storage_data)
        f_module = self.get_module_mock(
            params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageValidation(idrac_connection_storage_volume_mock, f_module)
        idr_obj.validate_negative_values_for_volume_params({"volume_type": "RAID 0", "number_dedicated_hot_spare": 0})

        # Scenario - when number_dedicated_hot_spare is negative
        with pytest.raises(Exception) as exc:
            idr_obj.validate_negative_values_for_volume_params({"number_dedicated_hot_spare": -1})
        assert exc.value.args[0] == NEGATIVE_MSG.format(parameter="number_dedicated_hot_spare")

        # Scenario - when number_dedicated_hot_spare is not negative
        idr_obj.validate_negative_values_for_volume_params({"number_dedicated_hot_spare": 0})

    def test_validate_volume_drives(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """validate_volume_drives requires a drives entry with exactly one of
        id/location and then defers to raid_std_validation."""
        # Scenario - when volume drives are not defined
        volumes = {
            "name": "volume_1"
        }
        mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD,
                     return_value=TestStorageData.storage_data)
        f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageValidation(idrac_connection_storage_volume_mock, f_module)
        with pytest.raises(Exception) as exc:
            idr_obj.validate_volume_drives(volumes)
        assert exc.value.args[0] == DRIVES_NOT_DEFINED

        # Scenario - when in volume drives id and location both defined
        volumes = {
            "name": "volume_1",
            "drives": {
                "id": [
                    PHYSICAL_DISK_FIRST,
                    PHYSICAL_DISK_SECOND
                ],
                "location": [7, 3]
            }
        }
        with pytest.raises(Exception) as exc:
            idr_obj.validate_volume_drives(volumes)
        assert exc.value.args[0] == ID_AND_LOCATION_BOTH_DEFINED

        # Scenario - when in volume drives id and location both not defined
        volumes = {
            "name": "volume_1",
            "drives": {
                PHYSICAL_DISK_FIRST: {}
            }
        }
        with pytest.raises(Exception) as exc:
            idr_obj.validate_volume_drives(volumes)
        assert exc.value.args[0] == ID_AND_LOCATION_BOTH_NOT_DEFINED

        # Scenario - when in volume drives id is defined
        volumes = {
            "name": "volume_1",
            "drives": {
                "id": [
                    PHYSICAL_DISK_FIRST,
                    PHYSICAL_DISK_SECOND
                ]
            }
        }
        mocker.patch(MODULE_PATH + "StorageValidation.raid_std_validation",
                     return_value=True)
        out = idr_obj.validate_volume_drives(volumes)
        assert out is True

    def test_raid_std_validation(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
        """raid_std_validation enforces per-RAID-level span_length, span_depth,
        and drive-count constraints."""
        mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD,
                     return_value=TestStorageData.storage_data)
        f_module = self.get_module_mock(
            params=idrac_default_args, check_mode=False)
        idr_obj = self.module.StorageValidation(idrac_connection_storage_volume_mock, f_module)
        # Scenario - Invalid span_length
        params = {"span_depth": 1, "span_length": 4, "pd_count": 2, "volume_type": "RAID 1"}
        with pytest.raises(Exception) as exc:
            idr_obj.raid_std_validation(params["span_length"],
                                        params["span_depth"],
                                        params["volume_type"],
                                        params["pd_count"])
        assert exc.value.args[0] == INVALID_VALUE_MSG.format(parameter="span_length")

        # Scenario - Invalid span_depth for RAID 1
        params = {"span_depth": 4, "span_length": 2, "pd_count": 3, "volume_type": "RAID 1"}
        with pytest.raises(Exception) as exc:
            idr_obj.raid_std_validation(params["span_length"],
                                        params["span_depth"],
                                        params["volume_type"],
                                        params["pd_count"])
        assert exc.value.args[0] == INVALID_VALUE_MSG.format(parameter="span_depth")

        # Scenario - Invalid span_depth for RAID 10
        params = {"span_depth": 1, "span_length": 2, "pd_count": 9, "volume_type": "RAID 10"}
        with pytest.raises(Exception) as exc:
            idr_obj.raid_std_validation(params["span_length"],
                                        params["span_depth"],
                                        params["volume_type"],
                                        params["pd_count"])
        assert exc.value.args[0] == INVALID_VALUE_MSG.format(parameter="span_depth")

        # Scenario - Invalid drive count
        params = {"span_depth": 3, "span_length": 2, "pd_count": 1, "volume_type": "RAID 10"}
        with pytest.raises(Exception) as exc:
            idr_obj.raid_std_validation(params["span_length"],
                                        params["span_depth"],
                                        params["volume_type"],
                                        params["pd_count"])
        assert exc.value.args[0] == INVALID_VALUE_MSG.format(parameter="drives")

        # Scenario - Valid combination passes and returns True
        params = {"span_depth": 2, "span_length": 2, "pd_count": 4, "volume_type": "RAID 10"}
        out = idr_obj.raid_std_validation(params["span_length"],
                                          params["span_depth"],
                                          params["volume_type"],
                                          params["pd_count"])
        assert out is True
+
+
+class TestStorageCreate(TestStorageBase):
+ module = idrac_storage_volume
+
+ @pytest.fixture
+ def idrac_storage_volume_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_storage_volume_mock(self, mocker, idrac_storage_volume_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
+ return_value=idrac_storage_volume_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_storage_volume_mock
+ return idrac_conn_mock
+
+ def test_disk_slot_id_conversion(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
+ mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD, return_value=TestStorageData.storage_data)
+ # Scenario 1: location is given in drives
+ volume = {'drives': {'location': [0, 1]}}
+ idrac_default_args.update({"controller_id": CONTROLLER_ID_SECOND})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageCreate(idrac_connection_storage_volume_mock, f_module)
+ data = idr_obj.disk_slot_location_to_id_conversion(volume)
+ assert data['id'] == TestStorageData.storage_data_expected['Controller'][CONTROLLER_ID_SECOND]['PhysicalDisk']
+
+ # Scenario 2: id is given in drives
+ id_list = ['Disk.Bay.3:Enclosure.Internal.0-1:AHCI.Embedded.1-2',
+ 'Disk.Bay.2:Enclosure.Internal.0-1:AHCI.Embedded.1-2']
+ volume = {'drives': {'id': id_list}}
+ idrac_default_args.update({"controller_id": CONTROLLER_ID_SECOND})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageCreate(idrac_connection_storage_volume_mock, f_module)
+ data = idr_obj.disk_slot_location_to_id_conversion(volume)
+ assert data['id'] == id_list
+
+ # Scenario 3: When id and location is not given in drives
+ volume = {'drives': {}}
+ data = idr_obj.disk_slot_location_to_id_conversion(volume)
+ assert data == {}
+
+ def test_perform_intersection_on_disk(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
+ # Scenario 1: When iDRAC has firmware version greater than 3.00.00.00
+ mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD, return_value=TestStorageData.storage_data)
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="3.10.00")
+ volume = {'media_type': 'HDD', 'protocol': 'SATA'}
+ healthy_disk, available_disk, media_disk, protocol_disk = {1, 2, 3, 4, 5}, {1, 2, 3, 5}, {2, 3, 4, 5}, {1, 5}
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageCreate(idrac_connection_storage_volume_mock, f_module)
+ data = idr_obj.perform_intersection_on_disk(volume, healthy_disk, available_disk, media_disk, protocol_disk)
+ assert data == [5]
+
+ # Scenario 2: When iDRAC has firmware version less than 3.00.00.00
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="2.00.00")
+ volume = {'media_type': None, 'protocol': None}
+ data = idr_obj.perform_intersection_on_disk(volume, healthy_disk, available_disk, media_disk, protocol_disk)
+ assert data == [1, 2, 3, 4, 5]
+
+ def test_filter_disk(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
+ drive_resp = {'DriveID1': {'MediaType': 'HDD', 'Protocol': 'SAS', 'Status': {'Health': 'OK'},
+ 'Oem': {'Dell': {'DellPhysicalDisk': {'RaidStatus': 'Ready'}}}},
+ 'DriveID2': {'MediaType': 'SSD', 'Protocol': 'SATA', 'Status': {'Health': 'Not OK'}}}
+ idrac_data = {'Controllers': {CONTROLLER_ID_FIRST: {'Drives': drive_resp}}}
+ # Scenario 1: When iDRAC has firmware version greater than or equal to 3.00.00.00
+ mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD, return_value=idrac_data)
+ mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="3.05.00")
+ volume = {'media_type': 'HDD', 'protocol': 'SAS'}
+ idrac_default_args.update({"controller_id": CONTROLLER_ID_FIRST})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageCreate(idrac_connection_storage_volume_mock, f_module)
+ data = idr_obj.filter_disk(volume)
+ assert data == ['DriveID1']
+
+ def test_updating_drives_module_input_when_given(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
+ mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD, return_value=TestStorageData.storage_data)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageCreate(idrac_connection_storage_volume_mock, f_module)
+ # Scenario 1: When id is in drives
+ volume = {'drives': {'id': [2, 3, 4, 5]}}
+ filter_disk_output = [1, 3, 5]
+ data = idr_obj.updating_drives_module_input_when_given(volume, filter_disk_output)
+ assert data == [3, 5]
+
+ # Scenario 2: When id is not in drives
+ volume = {'drives': {'location': [2, 3, 4, 5]}}
+ data = idr_obj.updating_drives_module_input_when_given(volume, filter_disk_output)
+ assert data == []
+
+ def test_updating_volume_module_input_for_hotspare(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
+ mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD, return_value=TestStorageData.storage_data)
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageCreate(idrac_connection_storage_volume_mock, f_module)
+ # Scenario 1: number_dedicated_hot_spare is in volume and greater than zero
+ volume = {'number_dedicated_hot_spare': 2}
+ filter_disk_output = [1, 3, 5, 4, 2]
+ reserved_pd = [1]
+ drive_exists_in_id = [3, 5]
+ data = idr_obj.updating_volume_module_input_for_hotspare(volume, filter_disk_output, reserved_pd, drive_exists_in_id)
+ assert data == [4, 2]
+
+ # Scenario 2: number_dedicated_hot_spare is in volume and equal to zero
+ volume = {'number_dedicated_hot_spare': 0}
+ data = idr_obj.updating_volume_module_input_for_hotspare(volume, filter_disk_output, reserved_pd, drive_exists_in_id)
+ assert data == []
+
+ def test_updating_volume_module_input(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
+ mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD, return_value=TestStorageData.storage_data)
+ mocker.patch(MODULE_PATH + FILTER_DISK, return_value=[1, 2, 3, 4, 5])
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageCreate(idrac_connection_storage_volume_mock, f_module)
+ # Scenario 1: When required pd is less than available pd
+ volume = {'volumes': [{'span_depth': 1, 'span_length': 1, 'stripe_size': 65536, 'capacity': 50.45,
+ 'drives': {'id': [2, 3, 4]},
+ 'number_dedicated_hot_spare': 1}]}
+ idr_obj.module_ext_params.update(volume)
+ drive_exists_in_id = [1, 2]
+ idr_obj.updating_volume_module_input(drive_exists_in_id)
+ assert idr_obj.module_ext_params['volumes'][0]['drives']['id'] == [1]
+
+ # Scenario 2: When required pd is less than available pd with check_mode
+ volume = {'volumes': [{'span_depth': 1, 'span_length': 1, 'stripe_size': 65536, 'capacity': 50.45,
+ 'drives': {'id': [2, 3, 4]},
+ 'number_dedicated_hot_spare': 1}]}
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ idr_obj = self.module.StorageCreate(idrac_connection_storage_volume_mock, f_module)
+ idr_obj.module_ext_params.update(volume)
+ drive_exists_in_id = [1, 2]
+ with pytest.raises(Exception) as exc:
+ idr_obj.updating_volume_module_input(drive_exists_in_id)
+ assert exc.value.args[0] == CHANGES_FOUND
+
+ # Scenario 3: When required pd is greater than available pd
+ mocker.patch(MODULE_PATH + FILTER_DISK, return_value=[1])
+ controller_id = 'Qwerty'
+ volume = {'volumes': [{'span_depth': 2, 'span_length': 1,
+ 'drives': {'id': [1]}, 'number_dedicated_hot_spare': 0}],
+ 'controller_id': controller_id}
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageCreate(idrac_connection_storage_volume_mock, f_module)
+ idr_obj.module_ext_params.update(volume)
+ drive_exists_in_id = [1, 2]
+ with pytest.raises(Exception) as exc:
+ idr_obj.updating_volume_module_input(drive_exists_in_id)
+ assert exc.value.args[0] == NOT_ENOUGH_DRIVES.format(controller_id=controller_id)
+
+ # Scenario 4: When required pd is greater than available pd with check_mode
+ mocker.patch(MODULE_PATH + FILTER_DISK, return_value=[1])
+ controller_id = 'Qwerty'
+ volume = {'volumes': [{'span_depth': 2, 'span_length': 1,
+ 'drives': {'id': [1, 2]}, 'number_dedicated_hot_spare': 0}],
+ 'controller_id': controller_id}
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ idr_obj = self.module.StorageCreate(idrac_connection_storage_volume_mock, f_module)
+ idr_obj.module_ext_params.update(volume)
+ drive_exists_in_id = [1]
+ with pytest.raises(Exception) as exc:
+ idr_obj.updating_volume_module_input(drive_exists_in_id)
+ assert exc.value.args[0] == CHANGES_NOT_FOUND
+
+ def test_validate_create(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
+ mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD, return_value=TestStorageData.storage_data)
+ mocker.patch(MODULE_PATH + 'StorageValidation.validate_controller_exists', return_value=None)
+ mocker.patch(MODULE_PATH + 'StorageValidation.validate_job_wait_negative_values', return_value=None)
+ mocker.patch(MODULE_PATH + 'StorageValidation.validate_negative_values_for_volume_params', return_value=None)
+ mocker.patch(MODULE_PATH + 'StorageValidation.validate_volume_drives', return_value=None)
+ mocker.patch(MODULE_PATH + 'StorageCreate.updating_volume_module_input', return_value=None)
+ mocker.patch(MODULE_PATH + 'StorageCreate.disk_slot_location_to_id_conversion', return_value={'id': []})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageCreate(idrac_connection_storage_volume_mock, f_module)
+ # Scenario 1: When required pd is less than available pd
+ volume = {'volumes': [{'drives': {'location': [2, 3, 4]}},
+ {'drives': {'id': [1]}}]}
+ idr_obj.module_ext_params.update(volume)
+ idr_obj.validate()
+ assert idr_obj.module_ext_params['volumes'][0]['drives']['id'] == []
+
+ def test_execute_create(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
+ mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD, return_value=TestStorageData.storage_data)
+ mocker.patch(MODULE_PATH + 'StorageCreate.validate', return_value=None)
+ mocker.patch(MODULE_PATH + 'StorageBase.constuct_payload', return_value=None)
+ mocker.patch(MODULE_PATH + 'iDRACRedfishAPI.import_scp', return_value=None)
+ mocker.patch(MODULE_PATH + 'StorageBase.wait_for_job_completion', return_value={})
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageCreate(idrac_connection_storage_volume_mock, f_module)
+ idr_obj.controller_id = CONTROLLER_ID_FOURTH
+ data = idr_obj.execute()
+ assert data == {}
+
+
+class TestStorageDelete(TestStorageBase):
+ module = idrac_storage_volume
+
+ @pytest.fixture
+ def idrac_storage_volume_mock(self):
+ idrac_obj = MagicMock()
+ return idrac_obj
+
+ @pytest.fixture
+ def idrac_connection_storage_volume_mock(self, mocker, idrac_storage_volume_mock):
+ idrac_conn_mock = mocker.patch(MODULE_PATH + 'iDRACRedfishAPI',
+ return_value=idrac_storage_volume_mock)
+ idrac_conn_mock.return_value.__enter__.return_value = idrac_storage_volume_mock
+ return idrac_conn_mock
+
+ def test_execute_delete(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
+ idrac_resp = {'Controllers': {'Cntrl1': {'Volumes': {'Volume_ID1': {'Name': 'Volume Name 1'}}}}}
+ mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD, return_value=idrac_resp)
+ mocker.patch(MODULE_PATH + 'StorageDelete.validate', return_value=None)
+ mocker.patch(MODULE_PATH + 'iDRACRedfishAPI.import_scp', return_value=None)
+ mocker.patch(MODULE_PATH + 'StorageBase.wait_for_job_completion', return_value={})
+ mocker.patch(MODULE_PATH + 'StorageBase.module_extend_input', return_value={})
+
+ # Scenario 1: When Non existing volume is passed as input
+ volume = {'volumes': [{'name': 'volume-1', 'span_depth': 1, 'span_length': 1},
+ {'name': 'volume-2', 'span_depth': 1, 'span_length': 1}]}
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageDelete(idrac_connection_storage_volume_mock, f_module)
+ idr_obj.module.params.update(volume)
+ with pytest.raises(Exception) as exc:
+ idr_obj.execute()
+ assert exc.value.args[0] == VOLUME_NOT_FOUND
+
+ # Scenario 2: When Non existing volume is passed as input with check_mode
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ idr_obj = self.module.StorageDelete(idrac_connection_storage_volume_mock, f_module)
+ idr_obj.module.params.update(volume)
+ with pytest.raises(Exception) as exc:
+ idr_obj.execute()
+ assert exc.value.args[0] == VOLUME_NOT_FOUND
+
+ # Scenario 3: When Existing volume is passed as input
+ volume = {'volumes': [{'name': 'Volume Name 1', 'span_depth': 1, 'span_length': 1}]}
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageDelete(idrac_connection_storage_volume_mock, f_module)
+ idr_obj.module.params.update(volume)
+ idr_obj.module_ext_params.update({'state': 'delete', 'volumes': volume})
+ data = idr_obj.execute()
+ assert data == {}
+
+ # Scenario 4: When Existing volume is passed as input with check_mode
+ f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ idr_obj = self.module.StorageDelete(idrac_connection_storage_volume_mock, f_module)
+ idr_obj.module.params.update(volume)
+ idr_obj.module_ext_params.update({'state': 'delete', 'volumes': volume})
+ with pytest.raises(Exception) as exc:
+ idr_obj.execute()
+ assert exc.value.args[0] == CHANGES_FOUND
+
+ def test_idrac_storage_volume_main_positive_case(self, idrac_default_args,
+ idrac_connection_storage_volume_mock, mocker):
+ def returning_none():
+ return None
+ mocker.patch(MODULE_PATH + VIEW_EXECUTE, return_value=returning_none)
+ view = 'view'
+ idrac_default_args.update({'state': view})
+ data = self._run_module(idrac_default_args)
+ assert data['msg'] == SUCCESSFUL_OPERATION_MSG.format(operation=view)
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_idrac_storage_volume_main_exception_handling_case(self, exc_type, idrac_default_args,
+ idrac_connection_storage_volume_mock, mocker):
+
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + VIEW_EXECUTE,
+ side_effect=exc_type('https://testhost.com', 400,
+ 'http error message',
+ {"accept-type": "application/json"},
+ StringIO(json_str)))
+ else:
+ mocker.patch(MODULE_PATH + VIEW_EXECUTE,
+ side_effect=exc_type('test'))
+ result = self._run_module(idrac_default_args)
+ if exc_type == URLError:
+ assert result['unreachable'] is True
+ else:
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py
index 0ef6e6da3..6087f140f 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_user.py
@@ -22,6 +22,24 @@ from ansible.module_utils._text import to_text
from io import StringIO
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
+VERSION = "3.60.60.60"
+VERSION13G = "2.70.70.70"
+SLOT_API = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/"
+CHANGES_FOUND = "Changes found to commit!"
+SLEEP_PATH = 'idrac_user.time.sleep'
+USERNAME2 = "Users.2#UserName"
+GET_PAYLOAD = "idrac_user.get_payload"
+PAYLOAD_XML = "idrac_user.convert_payload_xml"
+XML_DATA = "<xml-data>"
+USERNAME1 = "Users.1#UserName"
+IMPORT_SCP = "idrac_user.iDRACRedfishAPI.import_scp"
+USER2 = "User.2#UserName"
+SUCCESS_CREATED = "Successfully created a request."
+SUCCESS_MSG = "Successfully created user account."
+SUCCESS_UPDATED = "Successfully updated user account."
+INVOKE_REQUEST = "idrac_user.iDRACRedfishAPI.invoke_request"
+CM_ACCOUNT = "idrac_user.create_or_modify_account"
+USER_PRIVILAGE = "Users.1#Privilege"
class TestIDRACUser(FakeAnsibleModule):
@@ -78,7 +96,7 @@ class TestIDRACUser(FakeAnsibleModule):
"Users.1.ProtocolEnable": idrac_default_args["protocol_enable"],
"Users.1.AuthenticationProtocol": idrac_default_args["authentication_protocol"],
"Users.1.PrivacyProtocol": idrac_default_args["privacy_protocol"]}
- xml_payload, json_payload = self.module.convert_payload_xml(payload)
+ _xml, json_payload = self.module.convert_payload_xml(payload)
assert json_payload["Users.1#SolEnable"] is True
def test_remove_user_account_check_mode_1(self, idrac_connection_user_mock, idrac_default_args, mocker):
@@ -87,12 +105,14 @@ class TestIDRACUser(FakeAnsibleModule):
"ipmi_serial_privilege": None, "enable": False, "sol_enable": False,
"protocol_enable": False, "authentication_protocol": "SHA",
"privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
slot_id = 1
- slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
+ slot_uri = SLOT_API.format(slot_id)
with pytest.raises(Exception) as exc:
- self.module.remove_user_account(f_module, idrac_connection_user_mock, slot_uri, slot_id)
- assert exc.value.args[0] == "Changes found to commit!"
+ self.module.remove_user_account(
+ f_module, idrac_connection_user_mock, slot_uri, slot_id)
+ assert exc.value.args[0] == CHANGES_FOUND
def test_remove_user_account_check_mode_2(self, idrac_connection_user_mock, idrac_default_args, mocker):
idrac_default_args.update({"state": "absent", "user_name": "user_name", "new_user_name": None,
@@ -100,9 +120,11 @@ class TestIDRACUser(FakeAnsibleModule):
"ipmi_serial_privilege": None, "enable": False, "sol_enable": False,
"protocol_enable": False, "authentication_protocol": "SHA",
"privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=True)
with pytest.raises(Exception) as exc:
- self.module.remove_user_account(f_module, idrac_connection_user_mock, None, None)
+ self.module.remove_user_account(
+ f_module, idrac_connection_user_mock, None, None)
assert exc.value.args[0] == "No changes found to commit!"
def test_remove_user_account_check_mode_3(self, idrac_connection_user_mock, idrac_default_args, mocker):
@@ -111,12 +133,15 @@ class TestIDRACUser(FakeAnsibleModule):
"ipmi_serial_privilege": None, "enable": False, "sol_enable": False,
"protocol_enable": False, "authentication_protocol": "SHA",
"privacy_protocol": "AES"})
- idrac_connection_user_mock.remove_user_account.return_value = {"success": True}
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idrac_connection_user_mock.remove_user_account.return_value = {
+ "success": True}
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
slot_id = 1
- slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
- mocker.patch(MODULE_PATH + 'idrac_user.time.sleep', return_value=None)
- self.module.remove_user_account(f_module, idrac_connection_user_mock, slot_uri, slot_id)
+ slot_uri = SLOT_API.format(slot_id)
+ mocker.patch(MODULE_PATH + SLEEP_PATH, return_value=None)
+ self.module.remove_user_account(
+ f_module, idrac_connection_user_mock, slot_uri, slot_id)
def test_remove_user_account_check_mode_4(self, idrac_connection_user_mock, idrac_default_args, mocker):
idrac_default_args.update({"state": "absent", "user_name": "user_name", "new_user_name": None,
@@ -124,10 +149,13 @@ class TestIDRACUser(FakeAnsibleModule):
"ipmi_serial_privilege": None, "enable": False, "sol_enable": False,
"protocol_enable": False, "authentication_protocol": "SHA",
"privacy_protocol": "AES"})
- idrac_connection_user_mock.remove_user_account.return_value = {"success": True}
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ idrac_connection_user_mock.remove_user_account.return_value = {
+ "success": True}
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
with pytest.raises(Exception) as exc:
- self.module.remove_user_account(f_module, idrac_connection_user_mock, None, None)
+ self.module.remove_user_account(
+ f_module, idrac_connection_user_mock, None, None)
assert exc.value.args[0] == 'The user account is absent.'
def test_get_user_account_1(self, idrac_connection_user_mock, idrac_default_args, mocker):
@@ -140,10 +168,12 @@ class TestIDRACUser(FakeAnsibleModule):
mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.export_scp",
return_value=MagicMock())
mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.get_idrac_local_account_attr",
- return_value={"Users.2#UserName": "test_user", "Users.3#UserName": ""})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
- response = self.module.get_user_account(f_module, idrac_connection_user_mock)
- assert response[0]["Users.2#UserName"] == "test_user"
+ return_value={USERNAME2: "test_user", "Users.3#UserName": ""})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ response = self.module.get_user_account(
+ f_module, idrac_connection_user_mock)
+ assert response[0][USERNAME2] == "test_user"
assert response[3] == 3
assert response[4] == "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/3"
@@ -157,9 +187,11 @@ class TestIDRACUser(FakeAnsibleModule):
mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.export_scp",
return_value=MagicMock())
mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.get_idrac_local_account_attr",
- return_value={"Users.2#UserName": "test_user", "Users.3#UserName": "test"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
- response = self.module.get_user_account(f_module, idrac_connection_user_mock)
+ return_value={USERNAME2: "test_user", "Users.3#UserName": "test"})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ response = self.module.get_user_account(
+ f_module, idrac_connection_user_mock)
assert response[2] == 3
assert response[1] == "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/3"
@@ -170,227 +202,93 @@ class TestIDRACUser(FakeAnsibleModule):
"ipmi_serial_privilege": "Administrator", "enable": True,
"sol_enable": True, "protocol_enable": True,
"authentication_protocol": "SHA", "privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
with pytest.raises(Exception) as err:
self.module.get_user_account(f_module, idrac_connection_user_mock)
assert err.value.args[0] == "User name is not valid."
- def test_create_or_modify_account_1(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ @pytest.mark.parametrize("params", [
+ {"ret_val": SUCCESS_MSG, "empty_slot_id": 2,
+ "empty_slot_uri": SLOT_API.format(2)},
+ {"ret_val": SUCCESS_UPDATED, "slot_id": 2,
+ "slot_uri": SLOT_API.format(2)},
+ {"firm_ver": (14, VERSION), "ret_val": SUCCESS_MSG,
+ "empty_slot_id": 2, "empty_slot_uri": SLOT_API.format(2)},
+ {"firm_ver": (14, VERSION), "ret_val": SUCCESS_UPDATED,
+ "slot_id": 2, "slot_uri": SLOT_API.format(2)},
+ {"firm_ver": (14, VERSION), "ret_val": SUCCESS_UPDATED, "slot_id": 2, "slot_uri": SLOT_API.format(2),
+ "empty_slot_id": 2, "empty_slot_uri": SLOT_API.format(2)},
+ ])
+ def test_create_or_modify_account(self, idrac_connection_user_mock, idrac_default_args, mocker, params):
idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
"user_name": "test", "user_password": "password",
"privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
"ipmi_serial_privilege": "Administrator", "enable": True,
"sol_enable": True, "protocol_enable": True,
"authentication_protocol": "SHA", "privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
- idrac_connection_user_mock.get_server_generation = (13, "2.70.70.70")
- mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
- mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
- return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
- mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.import_scp",
- return_value={"Message": "Successfully created a request."})
- empty_slot_id = 2
- empty_slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(empty_slot_id)
- user_attr = {"User.2#UserName": "test_user"}
- mocker.patch(MODULE_PATH + 'idrac_user.time.sleep', return_value=None)
- response = self.module.create_or_modify_account(f_module, idrac_connection_user_mock, None, None,
- empty_slot_id, empty_slot_uri, user_attr)
- assert response[1] == "Successfully created user account."
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idrac_connection_user_mock.get_server_generation = params.get(
+ "firm_ver", (13, VERSION13G))
+ mocker.patch(MODULE_PATH + GET_PAYLOAD,
+ return_value={USERNAME2: "test_user"})
+ mocker.patch(MODULE_PATH + PAYLOAD_XML,
+ return_value=(XML_DATA, {USERNAME2: "test_user"}))
+ mocker.patch(MODULE_PATH + IMPORT_SCP,
+ return_value={"Message": SUCCESS_CREATED})
+ mocker.patch(MODULE_PATH + SLEEP_PATH, return_value=None)
+ mocker.patch(MODULE_PATH + INVOKE_REQUEST,
+ return_value={"Message": SUCCESS_CREATED})
+
+ empty_slot_id = params.get("empty_slot_id", None)
+ empty_slot_uri = params.get("empty_slot_uri", None)
+ slot_id = params.get("slot_id", None)
+ slot_uri = params.get("slot_uri", None)
+ user_attr = {USER2: "test_user"}
- def test_create_or_modify_account_2(self, idrac_connection_user_mock, idrac_default_args, mocker):
- idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
- "user_name": "test", "user_password": "password",
- "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
- "ipmi_serial_privilege": "Administrator", "enable": True,
- "sol_enable": True, "protocol_enable": True,
- "authentication_protocol": "SHA", "privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
- idrac_connection_user_mock.get_server_generation = (13, "2.70.70.70")
- mocker.patch(MODULE_PATH + 'idrac_user.time.sleep', return_value=None)
- mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
- mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
- return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
- mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.import_scp",
- return_value={"Message": "Successfully created a request."})
- slot_id = 2
- slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
- user_attr = {"User.2#UserName": "test_user"}
response = self.module.create_or_modify_account(f_module, idrac_connection_user_mock, slot_uri, slot_id,
- None, None, user_attr)
- assert response[1] == "Successfully updated user account."
-
- def test_create_or_modify_account_3(self, idrac_connection_user_mock, idrac_default_args, mocker):
- idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
- "user_name": "test", "user_password": "password",
- "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
- "ipmi_serial_privilege": "Administrator", "enable": True,
- "sol_enable": True, "protocol_enable": True,
- "authentication_protocol": "SHA", "privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
- idrac_connection_user_mock.get_server_generation = (13, "2.70.70.70")
- mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
- mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
- return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
- mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.import_scp",
- return_value={"Message": "Successfully created a request."})
- slot_id = 2
- slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
- user_attr = {"Users.1#UserName": "test_user"}
- with pytest.raises(Exception) as exc:
- self.module.create_or_modify_account(f_module, idrac_connection_user_mock, slot_uri, slot_id,
- None, None, user_attr)
- assert exc.value.args[0] == "Requested changes are already present in the user slot."
-
- def test_create_or_modify_account_4(self, idrac_connection_user_mock, idrac_default_args, mocker):
- idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
- "user_name": "test", "user_password": "password",
- "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
- "ipmi_serial_privilege": "Administrator", "enable": True,
- "sol_enable": True, "protocol_enable": True,
- "authentication_protocol": "SHA", "privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
- idrac_connection_user_mock.get_server_generation = (13, "2.70.70.70")
- mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
- mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
- return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
- mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.import_scp",
- return_value={"Message": "Successfully created a request."})
- slot_id = 2
- slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
- user_attr = {"Users.1#UserName": "test_user"}
- with pytest.raises(Exception) as exc:
- self.module.create_or_modify_account(f_module, idrac_connection_user_mock, slot_uri, slot_id,
- None, None, user_attr)
- assert exc.value.args[0] == "No changes found to commit!"
-
- def test_create_or_modify_account_5(self, idrac_connection_user_mock, idrac_default_args, mocker):
+ empty_slot_id, empty_slot_uri, user_attr)
+ assert response[1] == params.get("ret_val")
+
+ @pytest.mark.parametrize("params", [
+ {"ret_val": "Requested changes are already present in the user slot."},
+ {"firm_ver": (14, VERSION), "slot_id": None, "slot_uri": None,
+ "ret_val": "Maximum number of users reached. Delete a user account and retry the operation."},
+ {"check_mode": True, "ret_val": "No changes found to commit!"},
+ {"check_mode": True, "user_attr": {
+ USERNAME1: "test_user"}, "ret_val": CHANGES_FOUND},
+ {"check_mode": True, "user_attr": {USERNAME1: "test_user"}, "ret_val":
+ CHANGES_FOUND, "empty_slot_id": 2, "empty_slot_uri": SLOT_API.format(2)},
+ ])
+ def test_create_or_modify_account_exception(self, idrac_connection_user_mock, idrac_default_args, mocker, params):
idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
"user_name": "test", "user_password": "password",
"privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
"ipmi_serial_privilege": "Administrator", "enable": True,
"sol_enable": True, "protocol_enable": True,
"authentication_protocol": "SHA", "privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
- idrac_connection_user_mock.get_server_generation = (13, "2.70.70.70")
- mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
- mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
- return_value=("<xml-data>", {"Users.2#UserName": "test_user"}))
- mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.import_scp",
- return_value={"Message": "Successfully created a request."})
- slot_id = 2
- slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
- user_attr = {"Users.1#UserName": "test_user"}
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=params.get("check_mode", False))
+ idrac_connection_user_mock.get_server_generation = params.get(
+ "firm_ver", (13, VERSION13G))
+ mocker.patch(MODULE_PATH + GET_PAYLOAD,
+ return_value={USERNAME2: "test_user"})
+ mocker.patch(MODULE_PATH + PAYLOAD_XML,
+ return_value=(XML_DATA, {USERNAME2: "test_user"}))
+ mocker.patch(MODULE_PATH + IMPORT_SCP,
+ return_value={"Message": SUCCESS_CREATED})
+ mocker.patch(MODULE_PATH + INVOKE_REQUEST,
+ return_value={"Message": SUCCESS_CREATED})
+ slot_id = params.get("slot_id", 2)
+ slot_uri = params.get("slot_uri", SLOT_API.format(2))
+ empty_slot_id = params.get("empty_slot_id", None)
+ empty_slot_uri = params.get("empty_slot_uri", None)
+ user_attr = params.get("user_attr", {USERNAME2: "test_user"})
with pytest.raises(Exception) as exc:
self.module.create_or_modify_account(f_module, idrac_connection_user_mock, slot_uri, slot_id,
- None, None, user_attr)
- assert exc.value.args[0] == "Changes found to commit!"
-
- def test_create_or_modify_account_6(self, idrac_connection_user_mock, idrac_default_args, mocker):
- idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
- "user_name": "test", "user_password": "password",
- "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
- "ipmi_serial_privilege": "Administrator", "enable": True,
- "sol_enable": True, "protocol_enable": True,
- "authentication_protocol": "SHA", "privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
- idrac_connection_user_mock.get_server_generation = (14, "3.60.60.60")
- mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
- mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
- return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
- mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.invoke_request",
- return_value={"Message": "Successfully created a request."})
- slot_id = 2
- slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
- user_attr = {"User.2#UserName": "test_user"}
- response = self.module.create_or_modify_account(f_module, idrac_connection_user_mock, None, None,
- slot_id, slot_uri, user_attr)
- assert response[1] == "Successfully created user account."
-
- def test_create_or_modify_account_7(self, idrac_connection_user_mock, idrac_default_args, mocker):
- idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
- "user_name": "test", "user_password": "password",
- "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
- "ipmi_serial_privilege": "Administrator", "enable": True,
- "sol_enable": True, "protocol_enable": True,
- "authentication_protocol": "SHA", "privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
- idrac_connection_user_mock.get_server_generation = (14, "3.60.60.60")
- mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
- mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
- return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
- mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.invoke_request",
- return_value={"Message": "Successfully created a request."})
- slot_id = 2
- slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
- user_attr = {"User.2#UserName": "test_user"}
- with pytest.raises(Exception) as exc:
- self.module.create_or_modify_account(f_module, idrac_connection_user_mock, None, None,
- slot_id, slot_uri, user_attr)
- assert exc.value.args[0] == "Changes found to commit!"
-
- def test_create_or_modify_account_8(self, idrac_connection_user_mock, idrac_default_args, mocker):
- idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
- "user_name": "test", "user_password": "password",
- "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
- "ipmi_serial_privilege": "Administrator", "enable": True,
- "sol_enable": True, "protocol_enable": True,
- "authentication_protocol": "SHA", "privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
- idrac_connection_user_mock.get_server_generation = (14, "3.60.60.60")
- mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
- mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
- return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
- mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.invoke_request",
- return_value={"Message": "Successfully created a request."})
- slot_id = 2
- slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
- user_attr = {"User.2#UserName": "test_user"}
- response = self.module.create_or_modify_account(f_module, idrac_connection_user_mock, slot_uri, slot_id,
- None, None, user_attr)
- assert response[1] == "Successfully updated user account."
-
- def test_create_or_modify_account_both_slot_empty_input(self, idrac_connection_user_mock, idrac_default_args, mocker):
- idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
- "user_name": "test", "user_password": "password",
- "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
- "ipmi_serial_privilege": "Administrator", "enable": True,
- "sol_enable": True, "protocol_enable": True,
- "authentication_protocol": "SHA", "privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
- idrac_connection_user_mock.get_server_generation = (14, "3.60.60.60")
- mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
- mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
- return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
- mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.invoke_request",
- return_value={"Message": "Successfully created a request."})
- slot_id = 2
- slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
- user_attr = {"User.2#UserName": "test_user"}
- response = self.module.create_or_modify_account(f_module, idrac_connection_user_mock, slot_id, slot_uri,
- slot_id, slot_uri, user_attr)
- assert response[1] == "Successfully updated user account."
-
- def test_create_or_modify_account_both_slot_empty_none_input(self, idrac_connection_user_mock, idrac_default_args, mocker):
- idrac_default_args.update({"state": "present", "new_user_name": "new_user_name",
- "user_name": "test", "user_password": "password",
- "privilege": "Administrator", "ipmi_lan_privilege": "Administrator",
- "ipmi_serial_privilege": "Administrator", "enable": True,
- "sol_enable": True, "protocol_enable": True,
- "authentication_protocol": "SHA", "privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
- idrac_connection_user_mock.get_server_generation = (14, "3.60.60.60")
- mocker.patch(MODULE_PATH + "idrac_user.get_payload", return_value={"Users.2#UserName": "test_user"})
- mocker.patch(MODULE_PATH + "idrac_user.convert_payload_xml",
- return_value=("<xml-data>", {"Users.1#UserName": "test_user"}))
- mocker.patch(MODULE_PATH + "idrac_user.iDRACRedfishAPI.invoke_request",
- return_value={"Message": "Successfully created a request."})
- # slot_id = 2
- # slot_uri = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/{0}/".format(slot_id)
- user_attr = {"User.2#UserName": "test_user"}
- with pytest.raises(Exception) as exc:
- self.module.create_or_modify_account(f_module, idrac_connection_user_mock, None, None,
- None, None, user_attr)
- assert exc.value.args[0] == "Maximum number of users reached. Delete a user account and retry the operation."
+ empty_slot_id, empty_slot_uri, user_attr)
+ assert exc.value.args[0] == params.get("ret_val")
@pytest.mark.parametrize("exc_type", [SSLValidationError, URLError, ValueError, TypeError,
ConnectionError, HTTPError, ImportError, RuntimeError])
@@ -403,10 +301,10 @@ class TestIDRACUser(FakeAnsibleModule):
"authentication_protocol": "SHA", "privacy_protocol": "AES"})
json_str = to_text(json.dumps({"data": "out"}))
if exc_type not in [HTTPError, SSLValidationError]:
- mocker.patch(MODULE_PATH + "idrac_user.create_or_modify_account",
+ mocker.patch(MODULE_PATH + CM_ACCOUNT,
side_effect=exc_type('test'))
else:
- mocker.patch(MODULE_PATH + "idrac_user.create_or_modify_account",
+ mocker.patch(MODULE_PATH + CM_ACCOUNT,
side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
if exc_type != URLError:
@@ -425,7 +323,8 @@ class TestIDRACUser(FakeAnsibleModule):
"authentication_protocol": "SHA", "privacy_protocol": "AES"})
obj = MagicMock()
obj.json_data = {"error": {"message": "Some Error Occured"}}
- mocker.patch(MODULE_PATH + "idrac_user.remove_user_account", return_value=(obj, "error"))
+ mocker.patch(MODULE_PATH + "idrac_user.remove_user_account",
+ return_value=(obj, "error"))
result = self._run_module_with_fail_json(idrac_default_args)
assert result['failed'] is True
assert result['msg'] == "Some Error Occured"
@@ -438,8 +337,10 @@ class TestIDRACUser(FakeAnsibleModule):
"sol_enable": True, "protocol_enable": True,
"authentication_protocol": "SHA", "privacy_protocol": "AES"})
obj = MagicMock()
- obj.json_data = {"Oem": {"Dell": {"Message": "Unable to complete application of configuration profile values."}}}
- mocker.patch(MODULE_PATH + "idrac_user.remove_user_account", return_value=(obj, "error"))
+ obj.json_data = {"Oem": {"Dell": {
+ "Message": "Unable to complete application of configuration profile values."}}}
+ mocker.patch(MODULE_PATH + "idrac_user.remove_user_account",
+ return_value=(obj, "error"))
result = self._run_module_with_fail_json(idrac_default_args)
assert result['failed'] is True
assert result['msg'] == "Unable to complete application of configuration profile values."
@@ -452,8 +353,9 @@ class TestIDRACUser(FakeAnsibleModule):
"sol_enable": True, "protocol_enable": True,
"authentication_protocol": "SHA", "privacy_protocol": "AES"})
obj = MagicMock()
- obj.json_data = {"Oem": {"Dell": {"Message": "This Message Does Not Exists"}}}
- mocker.patch(MODULE_PATH + "idrac_user.create_or_modify_account", return_value=(obj, "created"))
+ obj.json_data = {
+ "Oem": {"Dell": {"Message": "This Message Does Not Exists"}}}
+ mocker.patch(MODULE_PATH + CM_ACCOUNT, return_value=(obj, "created"))
# with pytest.raises(Exception) as exc:
result = self._run_module(idrac_default_args)
assert result['changed'] is True
@@ -477,7 +379,8 @@ class TestIDRACUser(FakeAnsibleModule):
"ipmi_serial_privilege": "Administrator", "enable": True,
"sol_enable": True, "protocol_enable": True,
"authentication_protocol": "SHA", "privacy_protocol": "AES"})
- f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
with pytest.raises(Exception) as err:
self.module.validate_input(f_module)
assert err.value.args[0] == "custom_privilege value should be from 0 to 511."
@@ -491,12 +394,14 @@ class TestIDRACUser(FakeAnsibleModule):
is_change_required = self.module.compare_payload(json_payload, None)
assert is_change_required is True
- json_payload = {"Users.1#Privilege": "123"}
- idrac_attr = {"Users.1#Privilege": "123"}
- is_change_required = self.module.compare_payload(json_payload, idrac_attr)
+ json_payload = {USER_PRIVILAGE: "123"}
+ idrac_attr = {USER_PRIVILAGE: "123"}
+ is_change_required = self.module.compare_payload(
+ json_payload, idrac_attr)
assert is_change_required is False
- json_payload = {"Users.1#Privilege": "123"}
- idrac_attr = {"Users.1#Privilege": "124"}
- is_change_required = self.module.compare_payload(json_payload, idrac_attr)
+ json_payload = {USER_PRIVILAGE: "123"}
+ idrac_attr = {USER_PRIVILAGE: "124"}
+ is_change_required = self.module.compare_payload(
+ json_payload, idrac_attr)
assert is_change_required is True
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py
index 627c5e71d..1f51f6cae 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_console_preferences.py
@@ -2,8 +2,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.1.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -2227,12 +2227,12 @@ class TestOmeAppConsolePreferences(FakeAnsibleModule):
assert result["unreachable"] is True
elif exc_type not in [HTTPError, SSLValidationError]:
mocker.patch(MODULE_PATH + '_validate_params', side_effect=exc_type("exception message"))
- result = self._run_module_with_fail_json(ome_default_args)
+ result = self._run_module(ome_default_args)
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + '_validate_params',
side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
- result = self._run_module_with_fail_json(ome_default_args)
+ result = self._run_module(ome_default_args)
assert result['failed'] is True
assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py
index 9b92bb3c2..50f9e09e6 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_local_access_configuration.py
@@ -2,8 +2,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.1.0
-# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.1.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -59,7 +59,7 @@ class TestOMEMDevicePower(FakeAnsibleModule):
"SettingType": "LocalAccessConfiguration", "EnableChassisDirect": False,
"EnableChassisPowerButton": False, "EnableKvmAccess": True, "EnableLcdOverridePin": False,
"LcdAccess": "VIEW_ONLY", "LcdCustomString": "LCD Text", "LcdLanguage": "en",
- "LcdPresence": "Present", "LcdOverridePin": "123456",
+ "LcdPresence": "Present", "LcdPinLength": 6, "LedPresence": "Absent", "LcdOverridePin": "123456",
"QuickSync": {"QuickSyncAccess": True, "TimeoutLimit": 10, "EnableInactivityTimeout": True,
"TimeoutLimitUnit": "MINUTES", "EnableReadAuthentication": True,
"EnableQuickSyncWifi": True, "QuickSyncHardware": "Present"}},
@@ -86,7 +86,7 @@ class TestOMEMDevicePower(FakeAnsibleModule):
"SettingType": "LocalAccessConfiguration", "EnableChassisDirect": False,
"EnableChassisPowerButton": False, "EnableKvmAccess": True, "EnableLcdOverridePin": False,
"LcdAccess": "VIEW_ONLY", "LcdCustomString": "LCD Text", "LcdLanguage": "en",
- "LcdPresence": "Present", "LcdOverridePin": "123456",
+ "LcdPresence": "Present", "LcdPinLength": 6, "LedPresence": "Absent", "LcdOverridePin": "123456",
"QuickSync": {"QuickSyncAccess": True, "TimeoutLimit": 10, "EnableInactivityTimeout": True,
"TimeoutLimitUnit": "MINUTES", "EnableReadAuthentication": True,
"EnableQuickSyncWifi": True, "QuickSyncHardware": "Present"}},
@@ -287,7 +287,7 @@ class TestOMEMDevicePower(FakeAnsibleModule):
def test_check_mode_validation(self, ome_conn_mock_lac, ome_default_args, ome_response_mock, mocker):
loc_data = {"EnableKvmAccess": True, "EnableChassisDirect": True, "EnableChassisPowerButton": True,
"EnableLcdOverridePin": True, "LcdAccess": True, "LcdCustomString": "LCD Text",
- "LcdLanguage": "en", "LcdOverridePin": "123456", "LcdPresence": "Present",
+ "LcdLanguage": "en", "LcdPresence": "Present", "LcdPinLength": 6, "LedPresence": "Absent", "LcdOverridePin": "123456",
"QuickSync": {"QuickSyncAccess": True, "TimeoutLimit": 10, "EnableInactivityTimeout": True,
"TimeoutLimitUnit": "MINUTES", "EnableReadAuthentication": True,
"EnableQuickSyncWifi": True, "QuickSyncHardware": "Present"}, }
@@ -329,18 +329,12 @@ class TestOMEMDevicePower(FakeAnsibleModule):
elif exc_type not in [HTTPError, SSLValidationError]:
mocker.patch(MODULE_PATH + 'check_domain_service',
side_effect=exc_type("exception message"))
- result = self._run_module_with_fail_json(ome_default_args)
- assert result['failed'] is True
- elif exc_type in [HTTPError]:
- mocker.patch(MODULE_PATH + 'check_domain_service',
- side_effect=exc_type(HTTPS_ADDRESS, 400, HTTP_ERROR_MSG,
- {"accept-type": "application/json"}, StringIO(json_str)))
result = self._run_module(ome_default_args)
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_domain_service',
side_effect=exc_type(HTTPS_ADDRESS, 400, HTTP_ERROR_MSG,
{"accept-type": "application/json"}, StringIO(json_str)))
- result = self._run_module_with_fail_json(ome_default_args)
+ result = self._run_module(ome_default_args)
assert result['failed'] is True
assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py
index 40fe1b1a2..fd76d6ac9 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_location.py
@@ -112,36 +112,36 @@ class TestOMEMDeviceLocation(FakeAnsibleModule):
@pytest.mark.parametrize("params", [
{"json_data": {"value": [
- {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ {'Id': 1234, 'PublicAddress': "xxx.xxx.x.x",
'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ {'PublicAddress': "X.X.X.X", 'DeviceId': 1235, "Type": 1000}]},
'message': "Successfully updated the location settings.",
- 'mparams': {"hostname": "1.2.3.4",
+ 'mparams': {"hostname": "xxx.xxx.x.x",
"device_id": 1234, "data_center": "data center",
"room": "room", "aisle": "aisle", "rack": "rack"}
},
{"json_data": {"value": [
{'Id': 1234, 'DeviceServiceTag': 'ABCD123',
- 'PublicAddress': "1.2.3.4", 'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'PublicAddress': "xxx.xxx.x.x", 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "X.X.X.X", 'DeviceId': 1235, "Type": 1000}]},
'message': "Successfully updated the location settings.",
- 'mparams': {"hostname": "1.2.3.4",
+ 'mparams': {"hostname": "xxx.xxx.x.x",
"device_service_tag": "ABCD123", "data_center": "data center",
"room": "room", "aisle": "aisle", "rack": "rack"}
},
{"json_data": {"value": [
- {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ {'Id': 1234, 'PublicAddress': "xxx.xxx.x.x",
'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ {'PublicAddress': "X.X.X.X", 'DeviceId': 1235, "Type": 1000}]},
'message': "Successfully updated the location settings.",
- 'mparams': {"hostname": "1.2.3.4",
+ 'mparams': {"hostname": "xxx.xxx.x.x",
"data_center": "data center",
"room": "room", "aisle": "aisle", "rack": "rack"}
},
{"json_data": {"value": [
{'Id': 1234, 'PublicAddress': "dummyhost_shouldnotexist",
'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ {'PublicAddress': "X.X.X.X", 'DeviceId': 1235, "Type": 1000}]},
'message': "Successfully updated the location settings.",
'mparams': {"hostname": "dummyhost_shouldnotexist",
"data_center": "data center",
@@ -159,9 +159,9 @@ class TestOMEMDeviceLocation(FakeAnsibleModule):
@pytest.mark.parametrize("params", [
{"json_data": {"value": [
- {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ {'Id': 1234, 'PublicAddress': "xxx.xxx.x.x",
'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ {'PublicAddress': "X.X.X.X", 'DeviceId': 1235, "Type": 1000}]},
'message': "The device location settings operation is supported only on OpenManage Enterprise Modular systems.",
'http_error_json': {
"error": {
@@ -179,14 +179,14 @@ class TestOMEMDeviceLocation(FakeAnsibleModule):
]
}
},
- 'mparams': {"hostname": "1.2.3.4",
+ 'mparams': {"hostname": "xxx.xxx.x.x",
"data_center": "data center",
"room": "room", "aisle": "aisle", "rack": "rack"}
},
{"json_data": {"value": [
- {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ {'Id': 1234, 'PublicAddress': "xxx.xxx.x.x",
'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ {'PublicAddress': "X.X.X.X", 'DeviceId': 1235, "Type": 1000}]},
'message': "Unable to complete the operation because the location settings are not supported on the specified device.",
'http_error_json': {
"error": {
@@ -206,16 +206,16 @@ class TestOMEMDeviceLocation(FakeAnsibleModule):
},
'check_domain_service': 'mocked_check_domain_service',
'standalone_chassis': ('Id', 1234),
- 'mparams': {"hostname": "1.2.3.4",
+ 'mparams': {"hostname": "xxx.xxx.x.x",
"data_center": "data center",
"room": "room", "aisle": "aisle", "rack": "rack"}
},
{"json_data": {"value": [
- {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ {'Id': 1234, 'PublicAddress': "xxx.xxx.x.x",
'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ {'PublicAddress': "X.X.X.X", 'DeviceId': 1235, "Type": 1000}]},
'message': "Unable to complete the operation because the entered target device id '123' is invalid.",
- 'mparams': {"hostname": "1.2.3.4", "device_id": 123,
+ 'mparams': {"hostname": "xxx.xxx.x.x.x.x.x.x", "device_id": 123,
"data_center": "data center",
"room": "room", "aisle": "aisle", "rack": "rack"}
},
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py
index 004586393..c0bf63e4a 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_mgmt_network.py
@@ -366,11 +366,11 @@ class TestOmeDeviceMgmtNetwork(FakeAnsibleModule):
{"mparams": {"device_id": 123}, "success": True,
"json_data": {"Type": 2000, "Id": 123, "Identifier": "ABCD123"},
"res": {"Type": 2000, "Id": 123, "Identifier": "ABCD123"},
- "diff": {"IPV4": "1.2.3.4"}},
+ "diff": {"IPV4": "xxx.xxx.x.x"}},
{"mparams": {"device_id": 123}, "success": True,
"json_data": {"Type": 4000, "Id": 123, "Identifier": "ABCD123"},
"res": {"Type": 4000, "Id": 123, "Identifier": "ABCD123"},
- "diff": {"IPV4": "1.2.3.4"}},
+ "diff": {"IPV4": "xxx.xxx.x.x"}},
])
def test_get_network_payload(
self, params, ome_connection_mock_for_device_network, ome_response_mock, mocker):
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py
index 553a57369..9a2255c49 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_power_settings.py
@@ -35,6 +35,28 @@ POWER_FAIL_MSG = "Unable to complete the operation because the power settings "
"are not supported on the specified device."
DOMAIN_FAIL_MSG = "The device location settings operation is supported only on " \
"OpenManage Enterprise Modular."
+ERROR_JSON = {
+ "error": {
+ "code": "Base.1.0.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information.",
+ "@Message.ExtendedInfo": [
+ {
+ "MessageId": "CGEN1004",
+ "RelatedProperties": [],
+ "Message": "Unable to process the request because an error occurred.",
+ "MessageArgs": [],
+ "Severity": "Critical",
+ "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
+ }
+ ]
+ }}
+MPARAMS = {"hostname": "xxx.xxx.x.x",
+ "power_configuration": {"enable_power_cap": True, "power_cap": 3424}
+ }
+POWER_JSON_DATA = {"value": [
+ {'Id': 1234, 'PublicAddress': "xxx.xxx.x.x",
+ 'DeviceId': 1234, "Type": 1000},
+ {'PublicAddress': "xxx.xxx.xx.x", 'DeviceId': 1235, "Type": 1000}]}
@pytest.fixture
@@ -49,11 +71,11 @@ class TestOMEMDevicePower(FakeAnsibleModule):
module = ome_device_power_settings
- @pytest.mark.parametrize("params", [
+ @pytest.mark.parametrize("params_inp", [
{"json_data": {"value": [
- {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ {'Id': 1234, 'PublicAddress': "xxx.xxx.x.x",
'DeviceServiceTag': 'ABCD123', "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}],
+ {'PublicAddress': "X.X.X.X", 'DeviceId': 1235, "Type": 1000}],
"EnableHotSpare": True,
"EnablePowerCapSettings": True,
"MaxPowerCap": "3424",
@@ -63,15 +85,15 @@ class TestOMEMDevicePower(FakeAnsibleModule):
"RedundancyPolicy": "NO_REDUNDANCY",
"SettingType": "Power"},
'message': SUCCESS_MSG,
- 'mparams': {"hostname": "1.2.3.4",
+ 'mparams': {"hostname": "xxx.xxx.x.x",
"power_configuration": {"enable_power_cap": True, "power_cap": 3424},
"hot_spare_configuration": {"enable_hot_spare": False, "primary_grid": "GRID_1"},
"device_id": 1234,
}},
{"json_data": {"value": [
- {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ {'Id': 1234, 'PublicAddress': "xxx.xxx.x.x",
'DeviceServiceTag': 'ABCD123', "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}],
+ {'PublicAddress': "X.X.X.X", 'DeviceId': 1235, "Type": 1000}],
"EnableHotSpare": True,
"EnablePowerCapSettings": True,
"MaxPowerCap": "3424",
@@ -81,15 +103,15 @@ class TestOMEMDevicePower(FakeAnsibleModule):
"RedundancyPolicy": "NO_REDUNDANCY",
"SettingType": "Power"},
'message': SUCCESS_MSG,
- 'mparams': {"hostname": "1.2.3.4",
+ 'mparams': {"hostname": "xxx.xxx.x.x",
"power_configuration": {"enable_power_cap": False, "power_cap": 3424},
"hot_spare_configuration": {"enable_hot_spare": True, "primary_grid": "GRID_1"},
"device_service_tag": 'ABCD123',
}},
{"json_data": {"value": [
- {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ {'Id': 1234, 'PublicAddress': "xxx.xxx.x.x",
'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}],
+ {'PublicAddress': "X.X.X.X", 'DeviceId': 1235, "Type": 1000}],
"EnableHotSpare": True,
"EnablePowerCapSettings": True,
"MaxPowerCap": "3424",
@@ -99,14 +121,14 @@ class TestOMEMDevicePower(FakeAnsibleModule):
"RedundancyPolicy": "NO_REDUNDANCY",
"SettingType": "Power"},
'message': SUCCESS_MSG,
- 'mparams': {"hostname": "1.2.3.4",
+ 'mparams': {"hostname": "xxx.xxx.x.x",
"power_configuration": {"enable_power_cap": False, "power_cap": 3424},
"hot_spare_configuration": {"enable_hot_spare": True, "primary_grid": "GRID_1"}
}},
{"json_data": {"value": [
{'Id': 1234, 'PublicAddress': "dummyhostname_shouldnotexist",
'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}],
+ {'PublicAddress': "X.X.X.X", 'DeviceId': 1235, "Type": 1000}],
"EnableHotSpare": True,
"EnablePowerCapSettings": True,
"MaxPowerCap": "3424",
@@ -121,20 +143,17 @@ class TestOMEMDevicePower(FakeAnsibleModule):
"hot_spare_configuration": {"enable_hot_spare": True, "primary_grid": "GRID_1"}
}}
])
- def test_ome_devices_power_settings_success(self, params, ome_conn_mock_power, ome_response_mock,
+ def test_ome_devices_power_settings_success(self, params_inp, ome_conn_mock_power, ome_response_mock,
ome_default_args, module_mock, mocker):
- ome_response_mock.success = params.get("success", True)
- ome_response_mock.json_data = params['json_data']
- ome_default_args.update(params['mparams'])
+ ome_response_mock.success = params_inp.get("success", True)
+ ome_response_mock.json_data = params_inp['json_data']
+ ome_default_args.update(params_inp['mparams'])
result = self._run_module(
- ome_default_args, check_mode=params.get('check_mode', False))
- assert result['msg'] == params['message']
+ ome_default_args, check_mode=params_inp.get('check_mode', False))
+ assert result['msg'] == params_inp['message']
@pytest.mark.parametrize("params", [
- {"json_data": {"value": [
- {'Id': 1234, 'PublicAddress': "1.2.3.4",
- 'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ {"json_data": POWER_JSON_DATA,
'message': DOMAIN_FAIL_MSG,
'http_error_json': {
"error": {
@@ -151,77 +170,39 @@ class TestOMEMDevicePower(FakeAnsibleModule):
}
]
}},
- 'mparams': {"hostname": "1.2.3.4",
+ 'mparams': {"hostname": "xxx.xxx.x.x",
"device_service_tag": 'ABCD123',
"power_configuration": {"enable_power_cap": True, "power_cap": 3424}
}},
{"json_data": {"value": [
- {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ {'Id': 1234, 'PublicAddress': "xxx.xxx.x.x",
'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ {'PublicAddress': "X.X.X.X", 'DeviceId': 1235, "Type": 1000}]},
'message': POWER_FAIL_MSG,
'check_domain_service': 'mocked_check_domain_service',
'get_chassis_device': ('Id', 1234),
- 'http_error_json': {
- "error": {
- "code": "Base.1.0.GeneralError",
- "message": "A general error has occurred. See ExtendedInfo for more information.",
- "@Message.ExtendedInfo": [
- {
- "MessageId": "CGEN1004",
- "RelatedProperties": [],
- "Message": "Unable to process the request because an error occurred.",
- "MessageArgs": [],
- "Severity": "Critical",
- "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
- }
- ]
- }},
- 'mparams': {"hostname": "1.2.3.4",
- "power_configuration": {"enable_power_cap": True, "power_cap": 3424}
- }},
- {"json_data": {"value": [
- {'Id': 1234, 'PublicAddress': "1.2.3.4",
- 'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'http_error_json': ERROR_JSON,
+ 'mparams': MPARAMS},
+ {"json_data": POWER_JSON_DATA,
'message': POWER_FAIL_MSG,
'check_domain_service': 'mocked_check_domain_service',
'get_chassis_device': ('Id', 1234),
'http_err_code': 404,
- 'http_error_json': {
- "error": {
- "code": "Base.1.0.GeneralError",
- "message": "A general error has occurred. See ExtendedInfo for more information.",
- "@Message.ExtendedInfo": [
- {
- "MessageId": "CGEN1004",
- "RelatedProperties": [],
- "Message": "Unable to process the request because an error occurred.",
- "MessageArgs": [],
- "Severity": "Critical",
- "Resolution": "Retry the operation. If the issue persists, contact your system administrator."
- }
- ]
- }},
- 'mparams': {"hostname": "1.2.3.4",
- "power_configuration": {"enable_power_cap": True, "power_cap": 3424}
- }},
- {"json_data": {"value": [
- {'Id': 1234, 'PublicAddress': "1.2.3.4",
- 'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ 'http_error_json': ERROR_JSON,
+ 'mparams': MPARAMS},
+ {"json_data": POWER_JSON_DATA,
'message': DEVICE_FAIL_MSG.format('id', 123),
'check_domain_service': 'mocked_check_domain_service',
'get_chassis_device': ('Id', 1234),
- 'mparams': {"hostname": "1.2.3.4", 'device_id': 123,
+ 'mparams': {"hostname": "xxx.xxx.x.x", 'device_id': 123,
"power_configuration": {"enable_power_cap": True, "power_cap": 3424}
}},
{"json_data": {"value": [
- {'Id': 1234, 'PublicAddress': "1.2.3.4",
+ {'Id': 1234, 'PublicAddress': "xxx.xxx.x.x",
'DeviceId': 1234, "Type": 1000},
- {'PublicAddress': "1.2.3.5", 'DeviceId': 1235, "Type": 1000}]},
+ {'PublicAddress': "X.X.X.X", 'DeviceId': 1235, "Type": 1000}]},
'message': CONFIG_FAIL_MSG,
- 'mparams': {"hostname": "1.2.3.4", "device_id": 123}}
+ 'mparams': {"hostname": "xxx.xxx.x.x", "device_id": 123}}
])
def test_ome_devices_power_settings_failure(self, params, ome_conn_mock_power, ome_response_mock,
ome_default_args, module_mock, mocker):
@@ -303,29 +284,29 @@ class TestOMEMDevicePower(FakeAnsibleModule):
result = self.module.get_ip_from_host("ZZ.ZZ.ZZ.ZZ")
assert result == "ZZ.ZZ.ZZ.ZZ"
- @pytest.mark.parametrize("exc_type",
+ @pytest.mark.parametrize("exc_type_ps",
[IOError, ValueError, SSLError, TypeError, ConnectionError, HTTPError, URLError])
- def test_ome_device_power_main_exception_case(self, exc_type, mocker, ome_default_args,
+ def test_ome_device_power_main_exception_case(self, exc_type_ps, mocker, ome_default_args,
ome_conn_mock_power, ome_response_mock):
ome_default_args.update({"device_id": 25011, "power_configuration": {"enable_power_cap": True,
"power_cap": 3424}})
ome_response_mock.status_code = 400
ome_response_mock.success = False
json_str = to_text(json.dumps({"info": "error_details"}))
- if exc_type == URLError:
+ if exc_type_ps == URLError:
mocker.patch(MODULE_PATH + 'check_domain_service',
- side_effect=exc_type("url open error"))
- result = self._run_module(ome_default_args)
- assert result["unreachable"] is True
- elif exc_type not in [HTTPError, SSLValidationError]:
+ side_effect=exc_type_ps("url open error"))
+ result_ps = self._run_module(ome_default_args)
+ assert result_ps["unreachable"] is True
+ elif exc_type_ps not in [HTTPError, SSLValidationError]:
mocker.patch(MODULE_PATH + 'check_domain_service',
- side_effect=exc_type("exception message"))
- result = self._run_module_with_fail_json(ome_default_args)
- assert result['failed'] is True
+ side_effect=exc_type_ps("exception message"))
+ result_ps = self._run_module_with_fail_json(ome_default_args)
+ assert result_ps['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_domain_service',
- side_effect=exc_type('https://testhost.com', 400, 'http error message',
- {"accept-type": "application/json"}, StringIO(json_str)))
- result = self._run_module_with_fail_json(ome_default_args)
- assert result['failed'] is True
- assert 'msg' in result
+ side_effect=exc_type_ps('https://testhost.com', 400, 'http error message',
+ {"accept-type": "application/json"}, StringIO(json_str)))
+ result_ps = self._run_module_with_fail_json(ome_default_args)
+ assert result_ps['failed'] is True
+ assert 'msg' in result_ps
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py
index 60b8c17cc..78299581d 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_device_quick_deploy.py
@@ -267,13 +267,13 @@ class TestOMEMDevicePower(FakeAnsibleModule):
assert result["unreachable"] is True
elif exc_type not in [HTTPError, SSLValidationError]:
mocker.patch(MODULE_PATH + 'check_domain_service', side_effect=exc_type("exception message"))
- result = self._run_module_with_fail_json(ome_default_args)
+ result = self._run_module(ome_default_args)
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'check_domain_service',
side_effect=exc_type(HTTP_ADDRESS, 400, HTTP_ERROR_MSG,
{"accept-type": ACCESS_TYPE}, StringIO(json_str)))
- result = self._run_module_with_fail_json(ome_default_args)
+ result = self._run_module(ome_default_args)
assert result['failed'] is True
assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py
index 23148d390..f341911cf 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_devices.py
@@ -2,8 +2,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 6.1.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.1.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -456,12 +456,12 @@ class TestOmeDevices(FakeAnsibleModule):
assert result["unreachable"] is True
elif exc_type not in [HTTPError, SSLValidationError]:
mocker.patch(MODULE_PATH + 'get_dev_ids', side_effect=exc_type("exception message"))
- result = self._run_module_with_fail_json(ome_default_args)
+ result = self._run_module(ome_default_args)
assert result['failed'] is True
else:
mocker.patch(MODULE_PATH + 'get_dev_ids',
side_effect=exc_type('https://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
- result = self._run_module_with_fail_json(ome_default_args)
+ result = self._run_module(ome_default_args)
assert result['failed'] is True
assert 'msg' in result
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py
index 40160edf5..b1413d4bd 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_redfish_storage_volume.py
@@ -2,8 +2,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.1.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -23,6 +23,8 @@ from ansible.module_utils._text import to_text
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
HTTPS_ADDRESS = 'https://testhost.com'
+REDFISH = "/redfish/v1/"
+VOLUME_URI = "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.Integrated.1-1/Volumes/"
@pytest.fixture
@@ -40,6 +42,10 @@ class TestStorageVolume(FakeAnsibleModule):
def storage_volume_base_uri(self):
self.module.storage_collection_map.update({"storage_base_uri": "/redfish/v1/Systems/System.Embedded.1/Storage"})
+ @pytest.fixture
+ def greater_version(self):
+ return True
+
arg_list1 = [{"state": "present"}, {"state": "present", "volume_id": "volume_id"},
{"state": "absent", "volume_id": "volume_id"},
{"command": "initialize", "volume_id": "volume_id"},
@@ -61,6 +67,7 @@ class TestStorageVolume(FakeAnsibleModule):
redfish_connection_mock_for_storage_volume, param,
storage_volume_base_uri):
mocker.patch(MODULE_PATH + 'redfish_storage_volume.validate_inputs')
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.is_fw_ver_greater', return_value=True)
mocker.patch(MODULE_PATH + 'redfish_storage_volume.fetch_storage_resource')
mocker.patch(MODULE_PATH + 'redfish_storage_volume.configure_raid_operation',
return_value={"msg": "Successfully submitted volume task.",
@@ -95,8 +102,8 @@ class TestStorageVolume(FakeAnsibleModule):
def test_redfish_storage_volume_main_exception_handling_case(self, exc_type, mocker, redfish_default_args,
redfish_connection_mock_for_storage_volume,
redfish_response_mock):
- redfish_default_args.update({"state": "present"})
- mocker.patch(MODULE_PATH + 'redfish_storage_volume.validate_inputs')
+ redfish_default_args.update({"state": "present", "controller_id": "controller_id"})
+ mocker.patch(MODULE_PATH + 'redfish_storage_volume.is_fw_ver_greater')
redfish_response_mock.status_code = 400
redfish_response_mock.success = False
json_str = to_text(json.dumps({"data": "out"}))
@@ -150,7 +157,7 @@ class TestStorageVolume(FakeAnsibleModule):
assert message["msg"] == "Successfully submitted {0} volume task.".format(action)
@pytest.mark.parametrize("input", [{"state": "present"}, {"state": "absent"}, {"command": "initialize"}, {"command": None}])
- def test_configure_raid_operation(self, input, redfish_connection_mock_for_storage_volume, mocker):
+ def test_configure_raid_operation(self, input, redfish_connection_mock_for_storage_volume, mocker, greater_version):
f_module = self.get_module_mock(params=input)
mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_volume_create_modify',
return_value={"msg": "Successfully submitted create volume task.",
@@ -164,7 +171,7 @@ class TestStorageVolume(FakeAnsibleModule):
return_value={"msg": "Successfully submitted initialize volume task.",
"task_uri": "JobService/Jobs",
"task_id": "JID_789"})
- message = self.module.configure_raid_operation(f_module, redfish_connection_mock_for_storage_volume)
+ message = self.module.configure_raid_operation(f_module, redfish_connection_mock_for_storage_volume, greater_version)
val = list(input.values())
if val[0] == "present":
assert message["msg"] == "Successfully submitted create volume task."
@@ -257,7 +264,7 @@ class TestStorageVolume(FakeAnsibleModule):
assert exc.value.args[0] == "No changes found to be applied."
def test_perform_volume_create_modify_success_case_01(self, mocker, storage_volume_base_uri,
- redfish_connection_mock_for_storage_volume):
+ redfish_connection_mock_for_storage_volume, greater_version):
f_module = self.get_module_mock(params={"volume_id": "volume_id", "controller_id": "controller_id"})
message = {"msg": "Successfully submitted create volume task.", "task_uri": "JobService/Jobs",
"task_id": "JID_123"}
@@ -265,13 +272,13 @@ class TestStorageVolume(FakeAnsibleModule):
mocker.patch(MODULE_PATH + 'redfish_storage_volume.volume_payload', return_value={"payload": "value"})
mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_storage_volume_action', return_value=message)
mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_mode_validation', return_value=None)
- message = self.module.perform_volume_create_modify(f_module, redfish_connection_mock_for_storage_volume)
+ message = self.module.perform_volume_create_modify(f_module, redfish_connection_mock_for_storage_volume, greater_version)
assert message["msg"] == "Successfully submitted create volume task."
assert message["task_id"] == "JID_123"
def test_perform_volume_create_modify_success_case_02(self, mocker, storage_volume_base_uri,
redfish_connection_mock_for_storage_volume,
- redfish_response_mock):
+ redfish_response_mock, greater_version):
f_module = self.get_module_mock(params={"volume_id": "volume_id"})
message = {"msg": "Successfully submitted modify volume task.", "task_uri": "JobService/Jobs",
"task_id": "JID_123"}
@@ -280,13 +287,13 @@ class TestStorageVolume(FakeAnsibleModule):
mocker.patch(MODULE_PATH + 'redfish_storage_volume.volume_payload', return_value={"payload": "value"})
mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_storage_volume_action', return_value=message)
mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_mode_validation', return_value=None)
- message = self.module.perform_volume_create_modify(f_module, redfish_connection_mock_for_storage_volume)
+ message = self.module.perform_volume_create_modify(f_module, redfish_connection_mock_for_storage_volume, greater_version)
assert message["msg"] == "Successfully submitted modify volume task."
assert message["task_id"] == "JID_123"
def test_perform_volume_create_modify_success_case_03(self, mocker, storage_volume_base_uri,
redfish_connection_mock_for_storage_volume,
- redfish_response_mock):
+ redfish_response_mock, greater_version):
f_module = self.get_module_mock(params={"volume_id": "volume_id"})
message = {"msg": "Successfully submitted modify volume task.", "task_uri": "JobService/Jobs",
"task_id": "JID_123"}
@@ -295,13 +302,13 @@ class TestStorageVolume(FakeAnsibleModule):
mocker.patch(MODULE_PATH + 'redfish_storage_volume.volume_payload', return_value={"payload": "value"})
mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_storage_volume_action', return_value=message)
mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_mode_validation', return_value=None)
- message = self.module.perform_volume_create_modify(f_module, redfish_connection_mock_for_storage_volume)
+ message = self.module.perform_volume_create_modify(f_module, redfish_connection_mock_for_storage_volume, greater_version)
assert message["msg"] == "Successfully submitted modify volume task."
assert message["task_id"] == "JID_123"
def test_perform_volume_create_modify_failure_case_01(self, mocker, storage_volume_base_uri,
redfish_connection_mock_for_storage_volume,
- redfish_response_mock):
+ redfish_response_mock, greater_version):
f_module = self.get_module_mock(params={"volume_id": "volume_id"})
message = {"msg": "Successfully submitted modify volume task.", "task_uri": "JobService/Jobs",
"task_id": "JID_123"}
@@ -311,7 +318,7 @@ class TestStorageVolume(FakeAnsibleModule):
mocker.patch(MODULE_PATH + 'redfish_storage_volume.perform_storage_volume_action', return_value=message)
mocker.patch(MODULE_PATH + 'redfish_storage_volume.check_mode_validation', return_value=None)
with pytest.raises(Exception) as exc:
- self.module.perform_volume_create_modify(f_module, redfish_connection_mock_for_storage_volume)
+ self.module.perform_volume_create_modify(f_module, redfish_connection_mock_for_storage_volume, greater_version)
assert exc.value.args[0] == "Input options are not provided for the modify volume task."
def test_perform_storage_volume_action_success_case(self, mocker, redfish_response_mock,
@@ -485,7 +492,7 @@ class TestStorageVolume(FakeAnsibleModule):
self.module.check_physical_disk_exists(f_module, drive)
assert exc.value.args[0] == "No Drive(s) are attached to the specified Controller Id: RAID.Mezzanine.1C-1."
- def test_volume_payload_case_01(self, storage_volume_base_uri):
+ def test_volume_payload_case_01(self, storage_volume_base_uri, greater_version):
param = {
"drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"],
"capacity_bytes": 299439751168,
@@ -505,7 +512,7 @@ class TestStorageVolume(FakeAnsibleModule):
"SpanLength": 2,
"WriteCachePolicy": "WriteThrough"}}}}
f_module = self.get_module_mock(params=param)
- payload = self.module.volume_payload(f_module)
+ payload = self.module.volume_payload(f_module, greater_version)
assert payload["Drives"][0]["@odata.id"] == "/redfish/v1/Systems/System.Embedded.1/Storage/" \
"Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
assert payload["RAIDType"] == "RAID0"
@@ -518,19 +525,19 @@ class TestStorageVolume(FakeAnsibleModule):
assert payload["Dell"]["DellVirtualDisk"]["ReadCachePolicy"] == "NoReadAhead"
assert payload["@Redfish.OperationApplyTime"] == "Immediate"
- def test_volume_payload_case_02(self):
+ def test_volume_payload_case_02(self, greater_version):
param = {"block_size_bytes": 512,
"raid_type": "RAID0",
"name": "VD1",
"optimum_io_size_bytes": 65536}
f_module = self.get_module_mock(params=param)
- payload = self.module.volume_payload(f_module)
+ payload = self.module.volume_payload(f_module, greater_version)
assert payload["RAIDType"] == "RAID0"
assert payload["Name"] == "VD1"
assert payload["BlockSizeBytes"] == 512
assert payload["OptimumIOSizeBytes"] == 65536
- def test_volume_payload_case_03(self, storage_volume_base_uri):
+ def test_volume_payload_case_03(self, storage_volume_base_uri, greater_version):
"""Testing encrypted value in case value is passed false"""
param = {
"drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"],
@@ -550,7 +557,7 @@ class TestStorageVolume(FakeAnsibleModule):
"SpanLength": 2,
"WriteCachePolicy": "WriteThrough"}}}}
f_module = self.get_module_mock(params=param)
- payload = self.module.volume_payload(f_module)
+ payload = self.module.volume_payload(f_module, greater_version)
assert payload["Drives"][0]["@odata.id"] == "/redfish/v1/Systems/System.Embedded.1/" \
"Storage/Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
assert payload["RAIDType"] == "RAID0"
@@ -562,7 +569,7 @@ class TestStorageVolume(FakeAnsibleModule):
assert payload["EncryptionTypes"] == ["NativeDriveEncryption"]
assert payload["Dell"]["DellVirtualDisk"]["ReadCachePolicy"] == "NoReadAhead"
- def test_volume_payload_case_04(self, storage_volume_base_uri):
+ def test_volume_payload_case_04(self, storage_volume_base_uri, greater_version):
param = {
"drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"],
"capacity_bytes": 299439751168,
@@ -581,7 +588,7 @@ class TestStorageVolume(FakeAnsibleModule):
"SpanLength": 2,
"WriteCachePolicy": "WriteThrough"}}}}
f_module = self.get_module_mock(params=param)
- payload = self.module.volume_payload(f_module)
+ payload = self.module.volume_payload(f_module, greater_version)
assert payload["Drives"][0]["@odata.id"] == "/redfish/v1/Systems/System.Embedded.1/Storage/" \
"Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
assert payload["RAIDType"] == "RAID0"
@@ -593,7 +600,7 @@ class TestStorageVolume(FakeAnsibleModule):
assert payload["EncryptionTypes"] == ["NativeDriveEncryption"]
assert payload["Dell"]["DellVirtualDisk"]["ReadCachePolicy"] == "NoReadAhead"
- def test_volume_payload_case_05(self, storage_volume_base_uri):
+ def test_volume_payload_case_05(self, storage_volume_base_uri, greater_version):
param = {
"drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1",
"Disk.Bay.0:Enclosure.Internal.0-1:RAID.Mezzanine.1C-1",
@@ -615,7 +622,7 @@ class TestStorageVolume(FakeAnsibleModule):
"SpanLength": 2,
"WriteCachePolicy": "WriteThrough"}}}}
f_module = self.get_module_mock(params=param)
- payload = self.module.volume_payload(f_module)
+ payload = self.module.volume_payload(f_module, greater_version)
assert payload["Drives"][0]["@odata.id"] == "/redfish/v1/Systems/System.Embedded.1/Storage/" \
"Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
assert payload["RAIDType"] == "RAID6"
@@ -627,7 +634,7 @@ class TestStorageVolume(FakeAnsibleModule):
assert payload["EncryptionTypes"] == ["NativeDriveEncryption"]
assert payload["Dell"]["DellVirtualDisk"]["ReadCachePolicy"] == "NoReadAhead"
- def test_volume_payload_case_06(self, storage_volume_base_uri):
+ def test_volume_payload_case_06(self, storage_volume_base_uri, greater_version):
param = {
"drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1",
"Disk.Bay.0:Enclosure.Internal.0-1:RAID.Mezzanine.1C-1",
@@ -653,7 +660,7 @@ class TestStorageVolume(FakeAnsibleModule):
"SpanLength": 2,
"WriteCachePolicy": "WriteThrough"}}}}
f_module = self.get_module_mock(params=param)
- payload = self.module.volume_payload(f_module)
+ payload = self.module.volume_payload(f_module, greater_version)
assert payload["Drives"][0]["@odata.id"] == "/redfish/v1/Systems/System.Embedded.1/Storage/" \
"Drives/Disk.Bay.0:Enclosure.Internal.0-0:RAID.Mezzanine.1C-1"
assert payload["RAIDType"] == "RAID60"
@@ -679,7 +686,7 @@ class TestStorageVolume(FakeAnsibleModule):
"@odata.id": "/redfish/v1/Systems/System.Embedded.1/Storage"
},
}
- redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
+ redfish_connection_mock_for_storage_volume.root_uri = REDFISH
self.module.fetch_storage_resource(f_module, redfish_connection_mock_for_storage_volume)
assert self.module.storage_collection_map["storage_base_uri"] == "/redfish/v1/Systems/System.Embedded.1/Storage"
@@ -694,7 +701,7 @@ class TestStorageVolume(FakeAnsibleModule):
}
],
}
- redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
+ redfish_connection_mock_for_storage_volume.root_uri = REDFISH
with pytest.raises(Exception) as exc:
self.module.fetch_storage_resource(f_module, redfish_connection_mock_for_storage_volume)
assert exc.value.args[0] == "Target out-of-band controller does not support storage feature using Redfish API."
@@ -707,7 +714,7 @@ class TestStorageVolume(FakeAnsibleModule):
"Members": [
],
}
- redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
+ redfish_connection_mock_for_storage_volume.root_uri = REDFISH
with pytest.raises(Exception) as exc:
self.module.fetch_storage_resource(f_module, redfish_connection_mock_for_storage_volume)
assert exc.value.args[0] == "Target out-of-band controller does not support storage feature using Redfish API."
@@ -716,7 +723,7 @@ class TestStorageVolume(FakeAnsibleModule):
redfish_response_mock):
f_module = self.get_module_mock()
msg = "Target out-of-band controller does not support storage feature using Redfish API."
- redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
+ redfish_connection_mock_for_storage_volume.root_uri = REDFISH
redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 404,
json.dumps(msg), {}, None)
with pytest.raises(Exception) as exc:
@@ -726,7 +733,7 @@ class TestStorageVolume(FakeAnsibleModule):
redfish_response_mock):
f_module = self.get_module_mock()
msg = "http error"
- redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
+ redfish_connection_mock_for_storage_volume.root_uri = REDFISH
redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 400,
msg, {}, None)
with pytest.raises(Exception, match=msg) as exc:
@@ -736,13 +743,13 @@ class TestStorageVolume(FakeAnsibleModule):
redfish_response_mock):
f_module = self.get_module_mock()
msg = "connection error"
- redfish_connection_mock_for_storage_volume.root_uri = "/redfish/v1/"
+ redfish_connection_mock_for_storage_volume.root_uri = REDFISH
redfish_connection_mock_for_storage_volume.invoke_request.side_effect = URLError(msg)
with pytest.raises(Exception, match=msg) as exc:
self.module.fetch_storage_resource(f_module, redfish_connection_mock_for_storage_volume)
def test_check_mode_validation(self, redfish_connection_mock_for_storage_volume,
- redfish_response_mock, storage_volume_base_uri):
+ redfish_response_mock, storage_volume_base_uri, greater_version):
param = {"drives": ["Disk.Bay.0:Enclosure.Internal.0-0:RAID.Integrated.1-1"],
"capacity_bytes": 214748364800, "block_size_bytes": 512, "encryption_types": "NativeDriveEncryption",
"encrypted": False, "raid_type": "RAID0", "optimum_io_size_bytes": 65536}
@@ -751,13 +758,15 @@ class TestStorageVolume(FakeAnsibleModule):
with pytest.raises(Exception) as exc:
self.module.check_mode_validation(
f_module, redfish_connection_mock_for_storage_volume, "create",
- "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.Integrated.1-1/Volumes/")
+ VOLUME_URI,
+ greater_version=True)
assert exc.value.args[0] == "Changes found to be applied."
redfish_response_mock.json_data = {"Members@odata.count": 0}
with pytest.raises(Exception) as exc:
self.module.check_mode_validation(
f_module, redfish_connection_mock_for_storage_volume, "create",
- "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.Integrated.1-1/Volumes/")
+ VOLUME_URI,
+ greater_version=True)
assert exc.value.args[0] == "Changes found to be applied."
redfish_response_mock.json_data = {
"Members@odata.count": 1, "Id": "Disk.Virtual.0:RAID.Integrated.1-1",
@@ -772,18 +781,20 @@ class TestStorageVolume(FakeAnsibleModule):
with pytest.raises(Exception) as exc:
self.module.check_mode_validation(
f_module, redfish_connection_mock_for_storage_volume, "create",
- "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.Integrated.1-1/Volumes/")
+ VOLUME_URI,
+ greater_version=True)
assert exc.value.args[0] == "No changes found to be applied."
def test_check_mode_validation_01(self, redfish_connection_mock_for_storage_volume,
- redfish_response_mock, storage_volume_base_uri):
+ redfish_response_mock, storage_volume_base_uri, greater_version):
param1 = {"volume_id": None, 'name': None}
f_module = self.get_module_mock(params=param1)
f_module.check_mode = False
result = self.module.check_mode_validation(f_module,
redfish_connection_mock_for_storage_volume,
"",
- "/redfish/v1/Systems/System.Embedded.1/Storage/RAID.Integrated.1-1/Volumes/")
+ VOLUME_URI,
+ greater_version=True)
assert not result
def test_check_raid_type_supported_success_case01(self, mocker, redfish_response_mock, storage_volume_base_uri,
@@ -845,29 +856,31 @@ class TestStorageVolume(FakeAnsibleModule):
def test_get_apply_time_success_case_01(self, redfish_response_mock,
redfish_connection_mock_for_storage_volume,
- storage_volume_base_uri):
+ storage_volume_base_uri, greater_version):
param = {"controller_id": "controller_id", "apply_time": "Immediate"}
f_module = self.get_module_mock(params=param)
redfish_response_mock.success = True
redfish_response_mock.json_data = {"@Redfish.OperationApplyTimeSupport": {"SupportedValues": ["Immediate"]}}
self.module.get_apply_time(f_module,
redfish_connection_mock_for_storage_volume,
- controller_id="controller_id")
+ controller_id="controller_id",
+ greater_version=True)
def test_get_apply_time_success_case_02(self, redfish_response_mock,
redfish_connection_mock_for_storage_volume,
- storage_volume_base_uri):
+ storage_volume_base_uri, greater_version):
param = {"controller_id": "controller_id"}
f_module = self.get_module_mock(params=param)
redfish_response_mock.success = True
redfish_response_mock.json_data = {"@Redfish.OperationApplyTimeSupport": {"SupportedValues": ["Immediate"]}}
self.module.get_apply_time(f_module,
redfish_connection_mock_for_storage_volume,
- controller_id="controller_id")
+ controller_id="controller_id",
+ greater_version=True)
def test_get_apply_time_supported_failure_case(self, redfish_response_mock,
redfish_connection_mock_for_storage_volume,
- storage_volume_base_uri):
+ storage_volume_base_uri, greater_version):
param = {"controller_id": "controller_id", "apply_time": "Immediate"}
f_module = self.get_module_mock(params=param)
redfish_response_mock.success = True
@@ -875,25 +888,27 @@ class TestStorageVolume(FakeAnsibleModule):
with pytest.raises(Exception) as exc:
self.module.get_apply_time(f_module,
redfish_connection_mock_for_storage_volume,
- controller_id="controller_id")
+ controller_id="controller_id",
+ greater_version=True)
assert exc.value.args[0] == "Apply time Immediate \
is not supported. The supported values are ['OnReset']. Enter the valid values and retry the operation."
def test_get_apply_time_supported_exception_case(self, redfish_response_mock,
redfish_connection_mock_for_storage_volume,
- storage_volume_base_uri):
+ storage_volume_base_uri, greater_version):
param = {"controller_id": "controller_id", "apply_time": "Immediate"}
f_module = self.get_module_mock(params=param)
redfish_connection_mock_for_storage_volume.invoke_request.side_effect = HTTPError(HTTPS_ADDRESS, 400,
'', {}, None)
with pytest.raises(HTTPError) as ex:
self.module.get_apply_time(f_module, redfish_connection_mock_for_storage_volume,
- controller_id="controller_id")
+ controller_id="controller_id",
+ greater_version=True)
def test_check_apply_time_supported_and_reboot_required_success_case01(self, mocker,
redfish_response_mock,
redfish_connection_mock_for_storage_volume,
- storage_volume_base_uri):
+ storage_volume_base_uri, greater_version):
param = {"reboot_server": True}
f_module = self.get_module_mock(params=param)
mocker.patch(MODULE_PATH + 'redfish_storage_volume.get_apply_time',
@@ -901,13 +916,14 @@ is not supported. The supported values are ['OnReset']. Enter the valid values a
apply_time = self.module.get_apply_time(f_module, redfish_connection_mock_for_storage_volume)
val = self.module.check_apply_time_supported_and_reboot_required(f_module,
redfish_connection_mock_for_storage_volume,
- controller_id="controller_id")
+ controller_id="controller_id",
+ greater_version=True)
assert val
def test_check_apply_time_supported_and_reboot_required_success_case02(self, mocker,
redfish_response_mock,
redfish_connection_mock_for_storage_volume,
- storage_volume_base_uri):
+ storage_volume_base_uri, greater_version):
param = {"reboot_server": False}
f_module = self.get_module_mock(params=param)
mocker.patch(MODULE_PATH + 'redfish_storage_volume.get_apply_time',
@@ -915,12 +931,13 @@ is not supported. The supported values are ['OnReset']. Enter the valid values a
apply_time = self.module.get_apply_time(f_module, redfish_connection_mock_for_storage_volume)
val = self.module.check_apply_time_supported_and_reboot_required(f_module,
redfish_connection_mock_for_storage_volume,
- controller_id="controller_id")
+ controller_id="controller_id",
+ greater_version=True)
assert not val
def test_check_job_tracking_required_success_case01(self, mocker, redfish_response_mock,
redfish_connection_mock_for_storage_volume,
- storage_volume_base_uri):
+ storage_volume_base_uri, greater_version):
param = {"job_wait": True}
mocker.patch(MODULE_PATH + 'redfish_storage_volume.get_apply_time',
return_value="OnReset")
@@ -929,12 +946,13 @@ is not supported. The supported values are ['OnReset']. Enter the valid values a
val = self.module.check_job_tracking_required(f_module,
redfish_connection_mock_for_storage_volume,
reboot_required=False,
- controller_id="controller_id")
+ controller_id="controller_id",
+ greater_version=True)
assert not val
def test_check_job_tracking_required_success_case02(self, mocker, redfish_response_mock,
redfish_connection_mock_for_storage_volume,
- storage_volume_base_uri):
+ storage_volume_base_uri, greater_version):
param = {"job_wait": True}
mocker.patch(MODULE_PATH + 'redfish_storage_volume.get_apply_time',
return_value="Immediate")
@@ -942,12 +960,13 @@ is not supported. The supported values are ['OnReset']. Enter the valid values a
val = self.module.check_job_tracking_required(f_module,
redfish_connection_mock_for_storage_volume,
reboot_required=True,
- controller_id="controller_id")
+ controller_id="controller_id",
+ greater_version=True)
assert val
def test_check_job_tracking_required_success_case03(self, mocker, redfish_response_mock,
redfish_connection_mock_for_storage_volume,
- storage_volume_base_uri):
+ storage_volume_base_uri, greater_version):
param = {"job_wait": False}
mocker.patch(MODULE_PATH + 'redfish_storage_volume.get_apply_time',
return_value="Immediate")
@@ -955,7 +974,8 @@ is not supported. The supported values are ['OnReset']. Enter the valid values a
val = self.module.check_job_tracking_required(f_module,
redfish_connection_mock_for_storage_volume,
reboot_required=True,
- controller_id=None)
+ controller_id=None,
+ greater_version=True)
assert not val
def test_perform_reboot_timeout_case(self, mocker, redfish_response_mock,
@@ -1129,3 +1149,32 @@ is not supported. The supported values are ['OnReset']. Enter the valid values a
with pytest.raises(Exception) as ex:
self.module.validate_negative_job_time_out(f_module)
assert ex.value.args[0] == "The parameter job_wait_timeout value cannot be negative or zero."
+
+ def test_is_fw_ver_greater(self, redfish_connection_mock_for_storage_volume, redfish_response_mock):
+ # Scenario 1: FW version is not greater
+ redfish_response_mock.json_data = {
+ '@odata.context': '/redfish/v1/$metadata#Manager.Manager',
+ '@odata.id': '/redfish/v1/Managers/iDRAC.Embedded.1',
+ '@odata.type': '#Manager.v1_3_3.Manager',
+ 'FirmwareVersion': '2.81'
+ }
+ redfish_connection_mock_for_storage_volume.root_uri = REDFISH
+ ver = self.module.is_fw_ver_greater(redfish_connection_mock_for_storage_volume)
+ if ver is True:
+ assert ver is True
+ else:
+ assert ver is False
+
+        # Scenario 2: FW version is greater
+ redfish_response_mock.json_data = {
+ '@odata.context': '/redfish/v1/$metadata#Manager.Manager',
+ '@odata.id': '/redfish/v1/Managers/iDRAC.Embedded.1',
+ '@odata.type': '#Manager.v1_18_0.Manager',
+ 'FirmwareVersion': '7.10'
+ }
+ redfish_connection_mock_for_storage_volume.root_uri = REDFISH
+ ver = self.module.is_fw_ver_greater(redfish_connection_mock_for_storage_volume)
+ if ver is True:
+ assert ver is True
+ else:
+ assert ver is False
diff --git a/ansible_collections/dellemc/powerflex/.github/workflows/ansible-test.yml b/ansible_collections/dellemc/powerflex/.github/workflows/ansible-test.yml
index 988cba19e..058c434e2 100644
--- a/ansible_collections/dellemc/powerflex/.github/workflows/ansible-test.yml
+++ b/ansible_collections/dellemc/powerflex/.github/workflows/ansible-test.yml
@@ -114,7 +114,7 @@ jobs:
# Ansible-core 2.16 is supported only from Python 3.10 onwards
- python-version: "3.9"
ansible-version: stable-2.16
- - python-version: '3.9'
+ - python-version: "3.9"
ansible-version: devel
steps:
diff --git a/ansible_collections/dellemc/powerflex/CHANGELOG.rst b/ansible_collections/dellemc/powerflex/CHANGELOG.rst
index 6aec79d70..6224280f0 100644
--- a/ansible_collections/dellemc/powerflex/CHANGELOG.rst
+++ b/ansible_collections/dellemc/powerflex/CHANGELOG.rst
@@ -4,6 +4,13 @@ Dellemc.PowerFlex Change Logs
.. contents:: Topics
+v2.4.0
+======
+
+Minor Changes
+-------------
+
+- Added support for executing Ansible PowerFlex modules and roles on AWS environment.
v2.3.0
======
diff --git a/ansible_collections/dellemc/powerflex/FILES.json b/ansible_collections/dellemc/powerflex/FILES.json
index 9d135a9a9..ac7c75c78 100644
--- a/ansible_collections/dellemc/powerflex/FILES.json
+++ b/ansible_collections/dellemc/powerflex/FILES.json
@@ -95,7 +95,7 @@
"name": ".github/workflows/ansible-test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8d53fd6c5db3875651823edfc1873621987d946e7dcc591ff4c17eb92963df52",
+ "chksum_sha256": "5804cd1364b07aa6ebe9d2d8b29598dd815c33471f6760da29039c40a6beadba",
"format": 1
},
{
@@ -109,7 +109,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "45d7b5658389095f34e42e23f6ef321660ce5d2aebcb3905b688335d396616f8",
+ "chksum_sha256": "453a68b618853e7feccf984745d38fb5566aab5d3de884790f5fa85c28347993",
"format": 1
},
{
@@ -130,7 +130,7 @@
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5af6cc66393e097bbb1f2bbb1477c602a11564ea1e9e7741d81be037c3976b8d",
+ "chksum_sha256": "7b4e0d601cddc58a5b325e2543789d29ea69f5dd362c080a16c77b0b3239a439",
"format": 1
},
{
@@ -151,7 +151,7 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "932bfd76a42809ca17a1864376a9a2a88ad857bbb4990734d8522072466dcb8f",
+ "chksum_sha256": "0a9799578efac17952b5672ebb2d3a4f9541aa524ede37aa2ffe0372c0399fd8",
"format": 1
},
{
@@ -200,21 +200,21 @@
"name": "docs/CONTRIBUTING.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "196f895d4e1d69e8831017f36ed709f8ccb7ea035dc97d71abf7213e3e9be868",
+ "chksum_sha256": "f054a45c8a3b7032987d66180a9c5cc852af935e07f633489976b8f3d2b6755f",
"format": 1
},
{
"name": "docs/INSTALLATION.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c8fa229fdcec0cb7cdaeb94cfaa429675d14add2d95f77eb4e83a2311ce21203",
+ "chksum_sha256": "782332edfc5dfac225338eec316fcb80264c8a80d64356b3849fa4d1063f4eb7",
"format": 1
},
{
"name": "docs/ISSUE_TRIAGE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8bb1ea4a27677d9eb98f4c938fc0ed55c2e718f2abaa9bd5e1ee708e37ce2e1a",
+ "chksum_sha256": "db3b92f11a5d38287ead84d7baf84655736fd7c377e88bd3fc29f44ea46ff57e",
"format": 1
},
{
@@ -228,21 +228,21 @@
"name": "docs/MAINTAINER_GUIDE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e0970f922c8b97af52284ea00bf00337929c5e764c417152f49036dd23d21b4",
+ "chksum_sha256": "475e92f2e159eaa82f4666d9fd91907000790bea8417fa720d9f57328e101078",
"format": 1
},
{
"name": "docs/Release Notes.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a50b2c32c7ecc642e5794dcd0fbeef0bd4d3b0918300db814a72fd8f59cebd3c",
+ "chksum_sha256": "461b82fb097263724e23d0477036a9fbd7ed46f7a8ad14ff7bfc90e4dc6555a6",
"format": 1
},
{
"name": "docs/SECURITY.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "081098364e1203078b92bfd17db3186a6dc049cb81ea94059b09882d419a4946",
+ "chksum_sha256": "94ff66c47cb36e079846fd744ad870f627535e64326691b0421cad93feaffca2",
"format": 1
},
{
@@ -263,98 +263,98 @@
"name": "docs/modules/device.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3b2a96dab77d81e653c02750a23f0417e1c2ff24b31026dcd0f22134e8c2666e",
+ "chksum_sha256": "724112e62b9e42bf54860d5d6490df28db02f48a1470b222ddb44a7ad830ef8c",
"format": 1
},
{
"name": "docs/modules/fault_set.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3ca14b067fd1764db25799feb20a0b15c2999ae9aa9f015ef6b06c8759c34f7f",
+ "chksum_sha256": "8e5cf661716df94032a49f43d5ce8d751dea569def8ac99e26c5cfada44f4f61",
"format": 1
},
{
"name": "docs/modules/info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a2919a7d5b06991e9113acd068ab458e05be216f3fe4344cf1691603b9c29884",
+ "chksum_sha256": "6d7cbe381aa23de4ce4acb228213a916f7ac5898ccf95105995134abf2496f3a",
"format": 1
},
{
"name": "docs/modules/mdm_cluster.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "653df53c3af3b726a58b24fc0a78d882655a35ea40283530044ec78b5e3b7023",
+ "chksum_sha256": "9ffc87301f4e67b79451450f964c6559088696c246210342c65678d3b6823eaa",
"format": 1
},
{
"name": "docs/modules/protection_domain.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "789a3c50b0037017815b0f0b911ee462f50389da17ab1d55c649d572f137c822",
+ "chksum_sha256": "a31481e55cbcd48e2de17c5f909958a48a641c9407ca97ac81159d5a732b2769",
"format": 1
},
{
"name": "docs/modules/replication_consistency_group.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d371f5f275878b1e4995902946845a7b1e947705bd432593714d50069d20611e",
+ "chksum_sha256": "a7020f015b38a75b76608685358c0d40f1994e942e23728ba563ba0ad76d92d3",
"format": 1
},
{
"name": "docs/modules/replication_pair.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a1c6ac731f5fd9343b52a740474045f2600ab0867a73692facc619b68cba6ce",
+ "chksum_sha256": "1493e8c1d08acd7c51ee0423e0a688b3ee5801c915fdd3ecbf4c40461516fef7",
"format": 1
},
{
"name": "docs/modules/resource_group.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "12edaa4dea0ccd6f9b231b5ea10ddd5b100f4b8d23abf8e278769d7a45002c95",
+ "chksum_sha256": "d38e031f9d39e1c92241fc635abfa1cae0a8081dd77a794b03f7784d98d9eb05",
"format": 1
},
{
"name": "docs/modules/sdc.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0fbbcfcfc18c7c8ce473e614d4858887150aa689b4ba251b6f61997011c7c049",
+ "chksum_sha256": "fd486d97fd31a569846b33d38336a705e451f884a2ecd9197a90b01312e48a94",
"format": 1
},
{
"name": "docs/modules/sds.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0646d49b08b50b182c8730a5dd2ee2033e11afbeeac93f3bd45cd3e62cd702ff",
+ "chksum_sha256": "35b848c6fc91ff8af4608d360dc36a1b7a8134712eafd23b6b3c25c1cb4c1d86",
"format": 1
},
{
"name": "docs/modules/snapshot.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cf0f97870e12989f585e770efd5571b5de22a6154b73df241c3c1a700e7cb6fe",
+ "chksum_sha256": "df9b5ac178c0a16ba79a5d57a97e4dd0dfbb4e332af9864d8a1b90aa35227ff0",
"format": 1
},
{
"name": "docs/modules/snapshot_policy.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e4e54cfcca15c4e906727771c4055b8dce46dd9089daf39d694d82a688599156",
+ "chksum_sha256": "7b71c242c4cad07bd71731058093532976a02f9bc93ac658e65def287971cdf2",
"format": 1
},
{
"name": "docs/modules/storagepool.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6eda9bc51092e6737beb53c7d16b22989888802171c28786ec1459733df62b5f",
+ "chksum_sha256": "2954cea5c6999667466427d000d016ed770a0c9357dde997449b222b28ee8ea6",
"format": 1
},
{
"name": "docs/modules/volume.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67dead1b0e93a922e10e44969da93e2aa17604fc13078d76477f5f39d4391114",
+ "chksum_sha256": "8515171be935508a35837ac2e8f58c5c3ee6f284a0f822b4d74128d2803d93f2",
"format": 1
},
{
@@ -508,7 +508,7 @@
"name": "playbooks/roles/group_vars/all",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05ae8d3b9bba106581f346b384915000397c6a481054917b4a398fa6646fa93b",
+ "chksum_sha256": "229b0b1d3643b3427570244e84222ef016e9265ab31ef15d13ddf96329ae4e82",
"format": 1
},
{
@@ -662,7 +662,7 @@
"name": "plugins/module_utils/storage/dell/utils.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "68b549b4dbeefc2eec8974ddc00db73eaafde275a1306522d18a75eb3ae1f963",
+ "chksum_sha256": "f9cdf312c0aea0c6686bcf9d1121049e222050d11a1be6c51fcbe9dab64892e8",
"format": 1
},
{
@@ -1236,7 +1236,7 @@
"name": "roles/powerflex_config/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3118853f9e23c714f8c950127932f93c233be8b0c23cfffe7adc089e84db1128",
+ "chksum_sha256": "10f2358dbee525cf86fc27e1496b394bfaeb6ddcdce7af7accb194315861444b",
"format": 1
},
{
@@ -2062,7 +2062,7 @@
"name": "roles/powerflex_sdc/tasks/install_sdc.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "edcacc16abd2e2ddca0262e215130db8851d691f1c52ec54b641a1390180b386",
+ "chksum_sha256": "891ab050e6db8b216367d2075f31356aec6684f686e9a909c50924f70ede0e14",
"format": 1
},
{
@@ -2286,7 +2286,7 @@
"name": "roles/powerflex_sdr/tasks/add_sdr.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fc927ff472f1c3ad858e340c56d7e82c9bdfcbb44d48e5a9d03338285689f129",
+ "chksum_sha256": "0ef7dde0476382d5348a15a3f59870c4623789c200a4710eb9e7db3ce205c3c3",
"format": 1
},
{
@@ -2440,7 +2440,7 @@
"name": "roles/powerflex_sds/tasks/install_sds.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "23d5238154edc4827205019c6b649941414f6f80155d6b0273a74c6f435f3c46",
+ "chksum_sha256": "eb7b2291ea143accdb28777ab6bd4b5929ebd0f569891d9b47ce13ad8b0b9b76",
"format": 1
},
{
@@ -2629,7 +2629,7 @@
"name": "roles/powerflex_tb/tasks/install_tb3x.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "97b692e2c115a1e7b4b11f4e8de9dd5260720f683dae1259c2840eff2701fa2d",
+ "chksum_sha256": "25228bf930d81d4d45480318f52af3cf2d16c4a616d5f22f44a27918f5898c67",
"format": 1
},
{
@@ -2657,7 +2657,7 @@
"name": "roles/powerflex_tb/tasks/uninstall_tb.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8cecdc3db2cde3ad690e85e5f61e60532228c0448f8d9211a7caa502c783fa03",
+ "chksum_sha256": "b74b0f64a0f62bb36dc08e77a2684901e48c545affc69e57ffca9258a016ce2e",
"format": 1
},
{
@@ -2804,7 +2804,7 @@
"name": "roles/powerflex_webui/tasks/install_webui.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "744e4d2c5c8c207cb66f0182a8693398337b7b01187bff23bfc93187db151af8",
+ "chksum_sha256": "dcba9892f0bfb4b79ae5c229b1b6d6e5fe1ecc577ceca21a0461f8158d396572",
"format": 1
},
{
@@ -2878,6 +2878,13 @@
"format": 1
},
{
+ "name": "tests/sanity/ignore-2.18.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "51c1ac0a0e1858fc26f4e609e997a6084f49f18ee72bbed0b0e26377174ac60c",
+ "format": 1
+ },
+ {
"name": "tests/unit",
"ftype": "dir",
"chksum_type": null,
diff --git a/ansible_collections/dellemc/powerflex/MANIFEST.json b/ansible_collections/dellemc/powerflex/MANIFEST.json
index d52c820f7..9e4656fef 100644
--- a/ansible_collections/dellemc/powerflex/MANIFEST.json
+++ b/ansible_collections/dellemc/powerflex/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "dellemc",
"name": "powerflex",
- "version": "2.3.0",
+ "version": "2.4.0",
"authors": [
"Akash Shendge <ansible.team@dell.com>",
"Arindam Datta <ansible.team@dell.com>",
@@ -24,16 +24,16 @@
],
"license_file": null,
"dependencies": {},
- "repository": "https://github.com/dell/ansible-powerflex/tree/2.3.0",
- "documentation": "https://github.com/dell/ansible-powerflex/tree/2.3.0/docs",
- "homepage": "https://github.com/dell/ansible-powerflex/tree/2.3.0",
+ "repository": "https://github.com/dell/ansible-powerflex/tree/2.4.0",
+ "documentation": "https://github.com/dell/ansible-powerflex/tree/2.4.0/docs",
+ "homepage": "https://github.com/dell/ansible-powerflex/tree/2.4.0",
"issues": "https://www.dell.com/community/Automation/bd-p/Automation"
},
"file_manifest_file": {
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c91fc714e4e70b371ad5de1191d1e8d5a6a829f347016680a0ce39e4931fceb4",
+ "chksum_sha256": "1e3524ca4d32c06f7162058bfe2e094e72e1b205ab39316a94dde334b0d59992",
"format": 1
},
"format": 1
diff --git a/ansible_collections/dellemc/powerflex/README.md b/ansible_collections/dellemc/powerflex/README.md
index 155631def..6bb8ee215 100644
--- a/ansible_collections/dellemc/powerflex/README.md
+++ b/ansible_collections/dellemc/powerflex/README.md
@@ -6,29 +6,29 @@ The capabilities of the Ansible modules are managing SDCs, volumes, snapshots, s
## Table of contents
-* [Code of conduct](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/CODE_OF_CONDUCT.md)
-* [Maintainer guide](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/MAINTAINER_GUIDE.md)
-* [Committer guide](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/COMMITTER_GUIDE.md)
-* [Contributing guide](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/CONTRIBUTING.md)
-* [Branching strategy](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/BRANCHING.md)
-* [List of adopters](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/ADOPTERS.md)
-* [Maintainers](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/MAINTAINERS.md)
-* [Support](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/SUPPORT.md)
+* [Code of conduct](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/CODE_OF_CONDUCT.md)
+* [Maintainer guide](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/MAINTAINER_GUIDE.md)
+* [Committer guide](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/COMMITTER_GUIDE.md)
+* [Contributing guide](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/CONTRIBUTING.md)
+* [Branching strategy](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/BRANCHING.md)
+* [List of adopters](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/ADOPTERS.md)
+* [Maintainers](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/MAINTAINERS.md)
+* [Support](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/SUPPORT.md)
* [License](#license)
-* [Security](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/SECURITY.md)
+* [Security](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/SECURITY.md)
* [Prerequisites](#prerequisites)
* [List of Ansible modules for Dell PowerFlex](#list-of-ansible-modules-for-dell-powerflex)
* [Installation and execution of Ansible modules for Dell PowerFlex](#installation-and-execution-of-ansible-modules-for-dell-powerflex)
* [Releasing, Maintenance and Deprecation](#releasing-maintenance-and-deprecation)
## License
-The Ansible collection for PowerFlex is released and licensed under the GPL-3.0 license. See [LICENSE](https://github.com/dell/ansible-powerflex/blob/2.3.0/LICENSE) for the full terms. Ansible modules and modules utilities that are part of the Ansible collection for PowerFlex are released and licensed under the Apache 2.0 license. See [MODULE-LICENSE](https://github.com/dell/ansible-powerflex/blob/2.3.0/MODULE-LICENSE) for the full terms.
+The Ansible collection for PowerFlex is released and licensed under the GPL-3.0 license. See [LICENSE](https://github.com/dell/ansible-powerflex/blob/2.4.0/LICENSE) for the full terms. Ansible modules and modules utilities that are part of the Ansible collection for PowerFlex are released and licensed under the Apache 2.0 license. See [MODULE-LICENSE](https://github.com/dell/ansible-powerflex/blob/2.4.0/MODULE-LICENSE) for the full terms.
## Prerequisites
| **Ansible Modules** | **PowerFlex/VxFlex OS Version** | **SDK version** | **Python version** | **Ansible** |
|---------------------|-----------------------|-------|--------------------|--------------------------|
-| v2.3.0 |3.6 <br> 4.0 <br> 4.5 | 1.10.0 | 3.9.x <br> 3.10.x <br> 3.11.x | 2.14 <br> 2.15 <br> 2.16 |
+| v2.4.0 |3.6 <br> 4.0 <br> 4.5 | 1.11.0 | 3.9.x <br> 3.10.x <br> 3.11.x | 2.14 <br> 2.15 <br> 2.16 |
* Please follow PyPowerFlex installation instructions on [PyPowerFlex Documentation](https://github.com/dell/python-powerflex)
@@ -36,22 +36,22 @@ The Ansible collection for PowerFlex is released and licensed under the GPL-3.0
The modules are written in such a way that all requests are idempotent and hence fault-tolerant. It essentially means that the result of a successfully performed request is independent of the number of times it is executed.
## List of Ansible modules for Dell PowerFlex
- * [Info module](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/modules/info.rst)
- * [Snapshot module](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/modules/snapshot.rst)
- * [SDC module](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/modules/sdc.rst)
- * [Storage pool module](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/modules/storagepool.rst)
- * [Volume module](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/modules/volume.rst)
- * [SDS module](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/modules/sds.rst)
- * [Device Module](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/modules/device.rst)
- * [Protection Domain Module](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/modules/protection_domain.rst)
- * [MDM Cluster Module](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/modules/mdm_cluster.rst)
- * [Replication Consistency Group Module](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/modules/replication_consistency_group.rst)
- * [Replication Pair Module](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/modules/replication_pair.rst)
- * [Snapshot Policy Module](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/modules/snapshot_policy.rst)
- * [Fault Sets Module](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/modules/fault_set.rst)
+ * [Info module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/info.rst)
+ * [Snapshot module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/snapshot.rst)
+ * [SDC module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/sdc.rst)
+ * [Storage pool module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/storagepool.rst)
+ * [Volume module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/volume.rst)
+ * [SDS module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/sds.rst)
+ * [Device Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/device.rst)
+ * [Protection Domain Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/protection_domain.rst)
+ * [MDM Cluster Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/mdm_cluster.rst)
+ * [Replication Consistency Group Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/replication_consistency_group.rst)
+ * [Replication Pair Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/replication_pair.rst)
+ * [Snapshot Policy Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/snapshot_policy.rst)
+ * [Fault Sets Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/fault_set.rst)
## Installation and execution of Ansible modules for Dell PowerFlex
-The installation and execution steps of Ansible modules for Dell PowerFlex can be found [here](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/INSTALLATION.md).
+The installation and execution steps of Ansible modules for Dell PowerFlex can be found [here](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/INSTALLATION.md).
## Releasing, Maintenance and Deprecation
@@ -59,6 +59,6 @@ Ansible Modules for Dell Technologies PowerFlex follows [Semantic Versioning](ht
New version will be release regularly if significant changes (bug fix or new feature) are made in the collection.
-Released code versions are located on "release" branches with names of the form "release-x.y.z" where x.y.z corresponds to the version number. More information on branching strategy followed can be found [here](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/BRANCHING.md).
+Released code versions are located on "release" branches with names of the form "release-x.y.z" where x.y.z corresponds to the version number. More information on branching strategy followed can be found [here](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/BRANCHING.md).
Ansible Modules for Dell Technologies PowerFlex deprecation cycle is aligned with that of [Ansible](https://docs.ansible.com/ansible/latest/dev_guide/module_lifecycle.html). \ No newline at end of file
diff --git a/ansible_collections/dellemc/powerflex/changelogs/changelog.yaml b/ansible_collections/dellemc/powerflex/changelogs/changelog.yaml
index 2c8926ed1..a4fb3c6c8 100644
--- a/ansible_collections/dellemc/powerflex/changelogs/changelog.yaml
+++ b/ansible_collections/dellemc/powerflex/changelogs/changelog.yaml
@@ -158,3 +158,8 @@ releases:
name: resource_group
namespace: ''
release_date: '2024-03-29'
+ 2.4.0:
+ changes:
+ minor_changes:
+ - Added support for executing Ansible PowerFlex modules and roles on AWS environment.
+ release_date: '2024-04-30'
diff --git a/ansible_collections/dellemc/powerflex/docs/CONTRIBUTING.md b/ansible_collections/dellemc/powerflex/docs/CONTRIBUTING.md
index 9c45af69c..84531753a 100644
--- a/ansible_collections/dellemc/powerflex/docs/CONTRIBUTING.md
+++ b/ansible_collections/dellemc/powerflex/docs/CONTRIBUTING.md
@@ -10,7 +10,7 @@ You may obtain a copy of the License at
# How to contribute
-Become one of the contributors to this project! We thrive to build a welcoming and open community for anyone who wants to use the project or contribute to it. There are just a few small guidelines you need to follow. To help us create a safe and positive community experience for all, we require all participants to adhere to the [Code of Conduct](https://github.com/dell/ansible-powerflex/blob/2.3.0/CODE_OF_CONDUCT.md).
+Become one of the contributors to this project! We thrive to build a welcoming and open community for anyone who wants to use the project or contribute to it. There are just a few small guidelines you need to follow. To help us create a safe and positive community experience for all, we require all participants to adhere to the [Code of Conduct](https://github.com/dell/ansible-powerflex/blob/2.4.0/CODE_OF_CONDUCT.md).
## Table of contents
@@ -76,7 +76,7 @@ Triage helps ensure that issues resolve quickly by:
If you don't have the knowledge or time to code, consider helping with _issue triage_. The Ansible modules for Dell PowerFlex community will thank you for saving them time by spending some of yours.
-Read more about the ways you can [Triage issues](https://github.com/dell/ansible-powerflex/blob/2.3.0/ISSUE_TRIAGE.md).
+Read more about the ways you can [Triage issues](https://github.com/dell/ansible-powerflex/blob/2.4.0/ISSUE_TRIAGE.md).
## Your first contribution
@@ -89,7 +89,7 @@ When you're ready to contribute, it's time to create a pull request.
## Branching
-* [Branching Strategy for Ansible modules for Dell PowerFlex](https://github.com/dell/ansible-powerflex/blob/2.3.0/BRANCHING.md)
+* [Branching Strategy for Ansible modules for Dell PowerFlex](https://github.com/dell/ansible-powerflex/blob/2.4.0/BRANCHING.md)
## Signing your commits
@@ -144,7 +144,7 @@ Make sure that the title for your pull request uses the same format as the subje
### Quality gates for pull requests
-GitHub Actions are used to enforce quality gates when a pull request is created or when any commit is made to the pull request. These GitHub Actions enforce our minimum code quality requirement for any code that get checked into the repository. If any of the quality gates fail, it is expected that the contributor will look into the check log, understand the problem and resolve the issue. If help is needed, please feel free to reach out the maintainers of the project for [support](https://github.com/dell/ansible-powerflex/blob/2.3.0/SUPPORT.md).
+GitHub Actions are used to enforce quality gates when a pull request is created or when any commit is made to the pull request. These GitHub Actions enforce our minimum code quality requirement for any code that get checked into the repository. If any of the quality gates fail, it is expected that the contributor will look into the check log, understand the problem and resolve the issue. If help is needed, please feel free to reach out the maintainers of the project for [support](https://github.com/dell/ansible-powerflex/blob/2.4.0/SUPPORT.md).
#### Code sanitization
diff --git a/ansible_collections/dellemc/powerflex/docs/INSTALLATION.md b/ansible_collections/dellemc/powerflex/docs/INSTALLATION.md
index 1aed4d367..c2b8df3ba 100644
--- a/ansible_collections/dellemc/powerflex/docs/INSTALLATION.md
+++ b/ansible_collections/dellemc/powerflex/docs/INSTALLATION.md
@@ -41,7 +41,7 @@ You may obtain a copy of the License at
* Download the latest tar build from any of the available distribution channel [Ansible Galaxy](https://galaxy.ansible.com/dellemc/powerflex) /[Automation Hub](https://console.redhat.com/ansible/automation-hub/repo/published/dellemc/powerflex) and use this command to install the collection anywhere in your system:
- ansible-galaxy collection install dellemc-powerflex-2.3.0.tar.gz -p <install_path>
+ ansible-galaxy collection install dellemc-powerflex-2.4.0.tar.gz -p <install_path>
* Set the environment variable:
@@ -68,7 +68,7 @@ You may obtain a copy of the License at
## Ansible modules execution
-The Ansible server must be configured with Python library for PowerFlex to run the Ansible playbooks. The [Documents](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/) provide information on different Ansible modules along with their functions and syntax. The parameters table in the Product Guide provides information on various parameters which needs to be configured before running the modules.
+The Ansible server must be configured with Python library for PowerFlex to run the Ansible playbooks. The [Documents](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/) provide information on different Ansible modules along with their functions and syntax. The parameters table in the Product Guide provides information on various parameters which needs to be configured before running the modules.
## SSL certificate validation
diff --git a/ansible_collections/dellemc/powerflex/docs/ISSUE_TRIAGE.md b/ansible_collections/dellemc/powerflex/docs/ISSUE_TRIAGE.md
index 939aca18d..50d4665ef 100644
--- a/ansible_collections/dellemc/powerflex/docs/ISSUE_TRIAGE.md
+++ b/ansible_collections/dellemc/powerflex/docs/ISSUE_TRIAGE.md
@@ -43,8 +43,8 @@ Should explain what happened, what was expected and how to reproduce it together
- Ansible Version: [e.g. 2.14]
- Python Version [e.g. 3.11]
- - Ansible modules for Dell PowerFlex Version: [e.g. 2.3.0]
- - PowerFlex SDK version: [e.g. PyPowerFlex 1.10.0]
+ - Ansible modules for Dell PowerFlex Version: [e.g. 2.4.0]
+ - PowerFlex SDK version: [e.g. PyPowerFlex 1.11.0]
- Any other additional information...
#### Feature requests
diff --git a/ansible_collections/dellemc/powerflex/docs/MAINTAINER_GUIDE.md b/ansible_collections/dellemc/powerflex/docs/MAINTAINER_GUIDE.md
index b6efdd20b..5f982c2f9 100644
--- a/ansible_collections/dellemc/powerflex/docs/MAINTAINER_GUIDE.md
+++ b/ansible_collections/dellemc/powerflex/docs/MAINTAINER_GUIDE.md
@@ -27,7 +27,7 @@ If a candidate is approved, a Maintainer contacts the candidate to invite them t
## Maintainer policies
* Lead by example
-* Follow the [Code of Conduct](https://github.com/dell/ansible-powerflex/blob/2.3.0/CODE_OF_CONDUCT.md) and the guidelines in the [Contributing](https://github.com/dell/ansible-powerflex/blob/2.3.0/CONTRIBUTING.md) and [Committer](https://github.com/dell/ansible-powerflex/blob/2.3.0/COMMITTER_GUIDE.md) guides
+* Follow the [Code of Conduct](https://github.com/dell/ansible-powerflex/blob/2.4.0/CODE_OF_CONDUCT.md) and the guidelines in the [Contributing](https://github.com/dell/ansible-powerflex/blob/2.4.0/CONTRIBUTING.md) and [Committer](https://github.com/dell/ansible-powerflex/blob/2.4.0/COMMITTER_GUIDE.md) guides
* Promote a friendly and collaborative environment within our community
* Be actively engaged in discussions, answering questions, updating defects, and reviewing pull requests
* Criticize code, not people. Ideally, tell the contributor a better way to do what they need.
diff --git a/ansible_collections/dellemc/powerflex/docs/Release Notes.md b/ansible_collections/dellemc/powerflex/docs/Release Notes.md
index e9c5f24c2..1a2552326 100644
--- a/ansible_collections/dellemc/powerflex/docs/Release Notes.md
+++ b/ansible_collections/dellemc/powerflex/docs/Release Notes.md
@@ -1,6 +1,6 @@
**Ansible Modules for Dell Technologies PowerFlex**
=========================================
-### Release notes 2.3.0
+### Release notes 2.4.0
> © 2024 Dell Inc. or its subsidiaries. All rights reserved. Dell
> and other trademarks are trademarks of Dell Inc. or its
@@ -28,7 +28,7 @@ Table 1. Revision history
| Revision | Date | Description |
|----------|-----------------|-------------------------------------------------------------|
-| 01 | March 2024 | Current release of Ansible Modules for Dell PowerFlex 2.3.0 |
+| 01 | April 2024 | Current release of Ansible Modules for Dell PowerFlex 2.4.0 |
Product description
-------------------
@@ -44,6 +44,7 @@ each of the entities.
New features and enhancements
-----------------------------
Along with the previous release deliverables, this release supports following features -
+- Added support for executing Ansible PowerFlex modules and roles on AWS environment.
- Added support for resource group provisioning to validate, deploy, edit, add nodes and delete a resource group.
- The Info module is enhanced to list out all the firmware repository.
- Added support for PowerFlex ansible modules and roles on Azure.
@@ -62,11 +63,11 @@ Limitations
Distribution
------------
The software package is available for download from the [Ansible Modules
-for PowerFlex GitHub](https://github.com/dell/ansible-powerflex/tree/2.3.0) page.
+for PowerFlex GitHub](https://github.com/dell/ansible-powerflex/tree/2.4.0) page.
Documentation
-------------
-The documentation is available on [Ansible Modules for PowerFlex GitHub](https://github.com/dell/ansible-powerflex/tree/2.3.0/docs)
+The documentation is available on [Ansible Modules for PowerFlex GitHub](https://github.com/dell/ansible-powerflex/tree/2.4.0/docs)
page. It includes the following:
- README
diff --git a/ansible_collections/dellemc/powerflex/docs/SECURITY.md b/ansible_collections/dellemc/powerflex/docs/SECURITY.md
index 5ebcaa3ee..a7eab1ba4 100644
--- a/ansible_collections/dellemc/powerflex/docs/SECURITY.md
+++ b/ansible_collections/dellemc/powerflex/docs/SECURITY.md
@@ -12,7 +12,7 @@ You may obtain a copy of the License at
The Ansible modules for Dell PowerFlex repository are inspected for security vulnerabilities via blackduck scans and static code analysis.
-In addition to this, there are various security checks that get executed against a branch when a pull request is created/updated. Please refer to [pull request](https://github.com/dell/ansible-powerflex/blob/2.3.0/docs/CONTRIBUTING.md#Pull-requests) for more information.
+In addition to this, there are various security checks that get executed against a branch when a pull request is created/updated. Please refer to [pull request](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/CONTRIBUTING.md#Pull-requests) for more information.
## Reporting a vulnerability
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/device.rst b/ansible_collections/dellemc/powerflex/docs/modules/device.rst
index 4fcd82854..cbeb0f813 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/device.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/device.rst
@@ -22,7 +22,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.9.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/fault_set.rst b/ansible_collections/dellemc/powerflex/docs/modules/fault_set.rst
index 55b9972bc..191ab73ca 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/fault_set.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/fault_set.rst
@@ -22,7 +22,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.9.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/info.rst b/ansible_collections/dellemc/powerflex/docs/modules/info.rst
index 7b933203f..fd674804f 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/info.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/info.rst
@@ -24,7 +24,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.10.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/mdm_cluster.rst b/ansible_collections/dellemc/powerflex/docs/modules/mdm_cluster.rst
index babb39b6c..fa73ae5d6 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/mdm_cluster.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/mdm_cluster.rst
@@ -24,7 +24,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.9.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/protection_domain.rst b/ansible_collections/dellemc/powerflex/docs/modules/protection_domain.rst
index 84c640ef4..0bd532b9a 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/protection_domain.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/protection_domain.rst
@@ -22,7 +22,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.9.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/replication_consistency_group.rst b/ansible_collections/dellemc/powerflex/docs/modules/replication_consistency_group.rst
index 76d959026..d8d144077 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/replication_consistency_group.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/replication_consistency_group.rst
@@ -22,7 +22,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.9.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/replication_pair.rst b/ansible_collections/dellemc/powerflex/docs/modules/replication_pair.rst
index 254a2eb1d..7c883c6fd 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/replication_pair.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/replication_pair.rst
@@ -22,7 +22,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.9.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/resource_group.rst b/ansible_collections/dellemc/powerflex/docs/modules/resource_group.rst
index e8bdbde09..a72918d83 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/resource_group.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/resource_group.rst
@@ -22,7 +22,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.10.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/sdc.rst b/ansible_collections/dellemc/powerflex/docs/modules/sdc.rst
index ad375ebc7..7b0871b30 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/sdc.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/sdc.rst
@@ -22,7 +22,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.9.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/sds.rst b/ansible_collections/dellemc/powerflex/docs/modules/sds.rst
index f5c29516f..188fe9f9a 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/sds.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/sds.rst
@@ -22,7 +22,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.9.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/snapshot.rst b/ansible_collections/dellemc/powerflex/docs/modules/snapshot.rst
index 052453ad2..e09e80069 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/snapshot.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/snapshot.rst
@@ -22,7 +22,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.9.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/snapshot_policy.rst b/ansible_collections/dellemc/powerflex/docs/modules/snapshot_policy.rst
index deab7f050..dd683c921 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/snapshot_policy.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/snapshot_policy.rst
@@ -22,7 +22,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.8.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/storagepool.rst b/ansible_collections/dellemc/powerflex/docs/modules/storagepool.rst
index 76d94966d..f9f3f271f 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/storagepool.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/storagepool.rst
@@ -22,7 +22,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.9.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/volume.rst b/ansible_collections/dellemc/powerflex/docs/modules/volume.rst
index f3345a6d1..16dbf2b25 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/volume.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/volume.rst
@@ -24,7 +24,7 @@ The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
- Ansible-core 2.14 or later.
-- PyPowerFlex 1.9.0.
+- PyPowerFlex 1.11.0.
- Python 3.9, 3.10 or 3.11.
diff --git a/ansible_collections/dellemc/powerflex/playbooks/roles/group_vars/all b/ansible_collections/dellemc/powerflex/playbooks/roles/group_vars/all
index 1031958fc..5cec51f77 100644
--- a/ansible_collections/dellemc/powerflex/playbooks/roles/group_vars/all
+++ b/ansible_collections/dellemc/powerflex/playbooks/roles/group_vars/all
@@ -1,6 +1,7 @@
powerflex_common_file_install_location: "/var/tmp"
powerflex_common_esxi_files_location: "/tmp/"
powerflex_common_win_package_location: "C:\\Windows\\Temp"
+powerflex_gateway_disable_gpg_check: true
# powerflex sdc params
powerflex_sdc_driver_sync_repo_address: 'ftp://ftp.emc.com/'
powerflex_sdc_driver_sync_repo_user: 'QNzgdxXix'
diff --git a/ansible_collections/dellemc/powerflex/plugins/module_utils/storage/dell/utils.py b/ansible_collections/dellemc/powerflex/plugins/module_utils/storage/dell/utils.py
index e3ba11971..50f41666c 100644
--- a/ansible_collections/dellemc/powerflex/plugins/module_utils/storage/dell/utils.py
+++ b/ansible_collections/dellemc/powerflex/plugins/module_utils/storage/dell/utils.py
@@ -86,7 +86,7 @@ def ensure_required_libs(module):
module.fail_json(msg=missing_required_lib("PyPowerFlex V 1.10.0 or above"),
exception=POWERFLEX_SDK_IMP_ERR)
- min_ver = '1.10.0'
+ min_ver = '1.11.0'
try:
curr_version = pkg_resources.require("PyPowerFlex")[0].version
supported_version = (parse_version(curr_version) >= parse_version(min_ver))
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_config/tasks/main.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_config/tasks/main.yml
index f9340f0fd..67bad8013 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_config/tasks/main.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_config/tasks/main.yml
@@ -4,6 +4,7 @@
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
+ port: "{{ port }}"
validate_certs: "{{ validate_certs }}"
state: "present"
register: powerflex_config_mdm_ip_result
@@ -13,6 +14,7 @@
ansible.builtin.set_fact:
powerflex_config_array_version: "{{ powerflex_config_mdm_ip_result.mdm_cluster_details.master.versionInfo[1] }}"
powerflex_config_mdm_primary_hostname: "{{ hostvars[groups['mdm'][0]]['inventory_hostname'] }}"
+ powerflex_config_mdm_primary_ip: "{{ hostvars[groups['mdm'][0]]['ansible_host'] }}"
- name: Login to primary MDM of PowerFlex 3.6
ansible.builtin.command: scli --login --username {{ username }} --password "{{ password }}"
@@ -22,8 +24,30 @@
delegate_to: "{{ powerflex_config_mdm_primary_hostname }}"
when: powerflex_config_array_version == '3'
-- name: Login to primary MDM of PowerFlex 4.5
- ansible.builtin.command: scli --login --username {{ username }} --management_system_ip {{ hostname }} --password "{{ password }}"
+- name: Generate login certificate for PowerFlex version 4.x
+ block:
+ - name: Generate login certificate using management_system_ip
+ ansible.builtin.command: >
+ scli --generate_login_certificate --management_system_ip {{ hostname }} --username {{ username }} --password {{ password }}
+ --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }} --insecure
+ run_once: true
+ register: powerflex_config_generate_login_certificate
+ changed_when: powerflex_config_generate_login_certificate.rc == 0
+ delegate_to: "{{ powerflex_config_mdm_primary_hostname }}"
+ when: powerflex_config_array_version == '4'
+ rescue:
+ - name: Generate login certificate using primary_mdm_ip
+ ansible.builtin.command: >
+ scli --generate_login_certificate --management_system_ip {{ powerflex_config_mdm_primary_ip }} --username {{ username }}
+ --password {{ password }} --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }} --insecure
+ run_once: true
+ register: powerflex_config_generate_login_certificate_mdm_ip
+ changed_when: powerflex_config_generate_login_certificate_mdm_ip.rc == 0
+ delegate_to: "{{ powerflex_config_mdm_primary_hostname }}"
+ when: powerflex_config_array_version == '4'
+
+- name: Login to MDM for PowerFlex version 4.x
+ ansible.builtin.command: scli --login --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }}
run_once: true
register: powerflex_config_login_output
changed_when: powerflex_config_login_output.rc == 0
@@ -61,6 +85,7 @@
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
+ port: "{{ port }}"
validate_certs: "{{ validate_certs }}"
storage_pool_name: "{{ powerflex_storage_pool_name }}"
protection_domain_name: "{{ powerflex_protection_domain_name }}"
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/install_sdc.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/install_sdc.yml
index 9b75321c3..27c82db27 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/install_sdc.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/install_sdc.yml
@@ -4,6 +4,7 @@
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
+ port: "{{ port }}"
validate_certs: "{{ validate_certs }}"
state: "present"
register: powerflex_sdc_mdm_ip_result
@@ -71,5 +72,4 @@
group: "root"
notify: restart scini
when:
- - ansible_distribution != "VMkernel"
- - " 'WindowsOS' not in ansible_distribution"
+ - ansible_distribution not in ['WindowsOS', 'SLES', 'VMkernel']
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/add_sdr.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/add_sdr.yml
index 1af345276..f7cbfa378 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/add_sdr.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/add_sdr.yml
@@ -4,6 +4,7 @@
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
+ port: "{{ port }}"
validate_certs: "{{ validate_certs }}"
state: "present"
register: powerflex_sdr_mdm_ip_result
@@ -39,17 +40,34 @@
no_log: true
when: powerflex_sdr_array_version == "3"
-- name: Login to mdm for PowerFlex version 4.x
- ansible.builtin.command: >
- scli --login --management_system_ip {{ hostname }}
- --username admin
- --password "{{ password }}"
- --approve_certificate
+- name: Generate login certificate for PowerFlex version 4.x
+ block:
+ - name: Generate login certificate using management_system_ip
+ ansible.builtin.command: >
+ scli --generate_login_certificate --management_system_ip {{ hostname }} --username {{ username }} --password {{ password }}
+ --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }} --insecure
+ run_once: true
+ register: powerflex_sdr_generate_login_certificate
+ changed_when: powerflex_sdr_generate_login_certificate.rc == 0
+ delegate_to: "{{ powerflex_sdr_mdm_primary_hostname }}"
+ when: powerflex_sdr_array_version != "3"
+ rescue:
+ - name: Generate login certificate using primary_mdm_ip
+ ansible.builtin.command: >
+ scli --generate_login_certificate --management_system_ip {{ powerflex_sdr_primary_mdm_ip }} --username {{ username }}
+ --password {{ password }} --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }} --insecure
+ run_once: true
+ register: powerflex_sdr_generate_login_certificate_mdm_ip
+ changed_when: powerflex_sdr_generate_login_certificate_mdm_ip.rc == 0
+ delegate_to: "{{ powerflex_sdr_mdm_primary_hostname }}"
+ when: powerflex_sdr_array_version != "3"
+
+- name: Login to MDM for PowerFlex version 4.x
+ ansible.builtin.command: scli --login --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }}
run_once: true
- register: powerflex_initial_login
+ register: powerflex_sdr_login_output
+ changed_when: powerflex_sdr_login_output.rc == 0
delegate_to: "{{ powerflex_sdr_mdm_primary_hostname }}"
- changed_when: powerflex_initial_login.rc == 0
- no_log: true
when: powerflex_sdr_array_version != "3"
- name: Output msg of previous task login to mdm
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_sds/tasks/install_sds.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_sds/tasks/install_sds.yml
index 8887ff13c..010aee075 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_sds/tasks/install_sds.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_sds/tasks/install_sds.yml
@@ -4,6 +4,7 @@
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
+ port: "{{ port }}"
validate_certs: "{{ validate_certs }}"
state: "present"
register: powerflex_sds_mdm_ip_result
@@ -17,6 +18,7 @@
ansible.builtin.set_fact:
powerflex_sds_mdm_ips: "{{ powerflex_sds_mdm_ip_result.mdm_cluster_details.mdmAddresses | join(',') }}"
powerflex_sds_primary_mdm_hostname: "{{ hostvars[groups['mdm'][0]]['inventory_hostname'] }}"
+ powerflex_sds_primary_mdm_ip: "{{ hostvars[groups['mdm'][0]]['ansible_host'] }}"
- name: Include install_powerflex.yml
ansible.builtin.include_tasks: ../../powerflex_common/tasks/install_powerflex.yml
@@ -35,8 +37,30 @@
ansible.builtin.set_fact:
disks: "{{ powerflex_sds_disks }}"
+- name: Generate login certificate for PowerFlex version 4.x
+ block:
+ - name: Generate login certificate using management_system_ip
+ ansible.builtin.command: >
+ scli --generate_login_certificate --management_system_ip {{ hostname }} --username {{ username }} --password {{ password }}
+ --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }} --insecure
+ run_once: true
+ register: powerflex_sds_generate_login_certificate
+ changed_when: powerflex_sds_generate_login_certificate.rc == 0
+ delegate_to: "{{ powerflex_sds_primary_mdm_hostname }}"
+ when: powerflex_sds_array_version != "3"
+ rescue:
+ - name: Generate login certificate using primary_mdm_ip
+ ansible.builtin.command: >
+ scli --generate_login_certificate --management_system_ip {{ powerflex_sds_primary_mdm_ip }} --username {{ username }}
+ --password {{ password }} --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }} --insecure
+ run_once: true
+ register: powerflex_sds_generate_login_certificate_mdm_ip
+ changed_when: powerflex_sds_generate_login_certificate_mdm_ip.rc == 0
+ delegate_to: "{{ powerflex_sds_primary_mdm_hostname }}"
+ when: powerflex_sds_array_version != "3"
+
- name: Login to MDM for PowerFlex version 4.x
- ansible.builtin.command: scli --login --management_system_ip {{ hostname }} --username {{ username }} --password {{ password }} --approve_certificate
+ ansible.builtin.command: scli --login --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }}
run_once: true
register: powerflex_sds_login_output
changed_when: powerflex_sds_login_output.rc == 0
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/install_tb3x.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/install_tb3x.yml
index e602351da..ebb2011cc 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/install_tb3x.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/install_tb3x.yml
@@ -4,6 +4,7 @@
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
+ port: "{{ port }}"
validate_certs: "{{ validate_certs }}"
state: "present"
register: powerflex_tb_mdm_ip_result
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/uninstall_tb.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/uninstall_tb.yml
index b08bffed8..82de6e004 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/uninstall_tb.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/uninstall_tb.yml
@@ -29,9 +29,30 @@
when: powerflex_tb_mdm_cluster_mode[0] == "5_node" and powerflex_tb_scli_version[0] == '3'
# Switch from three or five to cluster one node for PowerFlex version 4.5
-- name: Login to primary MDM node of PowerFlex version 4.5
- ansible.builtin.command: >
- scli --login --management_system_ip {{ hostname }} --username {{ username }} --password {{ password }}
+- name: Generate login certificate for PowerFlex version 4.x
+ block:
+ - name: Generate login certificate using management_system_ip
+ ansible.builtin.command: >
+ scli --generate_login_certificate --management_system_ip {{ hostname }} --username {{ username }} --password {{ password }}
+ --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }} --insecure
+ run_once: true
+ register: powerflex_tb_generate_login_certificate
+ changed_when: powerflex_tb_generate_login_certificate.rc == 0
+ delegate_to: "{{ powerflex_tb_mdm_primary_hostname }}"
+ when: powerflex_tb_scli_version[0] >= '4'
+ rescue:
+ - name: Generate login certificate using primary_mdm_ip
+ ansible.builtin.command: >
+ scli --generate_login_certificate --management_system_ip {{ powerflex_tb_primary_ip }} --username {{ username }}
+ --password {{ password }} --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }} --insecure
+ run_once: true
+ register: powerflex_tb_generate_login_certificate_mdm_ip
+ changed_when: powerflex_tb_generate_login_certificate_mdm_ip.rc == 0
+ delegate_to: "{{ powerflex_tb_mdm_primary_hostname }}"
+ when: powerflex_tb_scli_version[0] >= '4'
+
+- name: Login to MDM for PowerFlex version 4.x
+ ansible.builtin.command: scli --login --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }}
run_once: true
register: powerflex_tb_login_output
changed_when: powerflex_tb_login_output.rc == 0
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_webui/tasks/install_webui.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_webui/tasks/install_webui.yml
index 13d58ffac..71886a075 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_webui/tasks/install_webui.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_webui/tasks/install_webui.yml
@@ -10,6 +10,7 @@
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
+ port: "{{ port }}"
validate_certs: "{{ validate_certs }}"
state: present
register: powerflex_webui_result
diff --git a/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.18.txt b/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.18.txt
new file mode 100644
index 000000000..a464e7b26
--- /dev/null
+++ b/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.18.txt
@@ -0,0 +1,14 @@
+plugins/modules/device.py validate-modules:missing-gplv3-license
+plugins/modules/sdc.py validate-modules:missing-gplv3-license
+plugins/modules/sds.py validate-modules:missing-gplv3-license
+plugins/modules/snapshot.py validate-modules:missing-gplv3-license
+plugins/modules/storagepool.py validate-modules:missing-gplv3-license
+plugins/modules/volume.py validate-modules:missing-gplv3-license
+plugins/modules/info.py validate-modules:missing-gplv3-license
+plugins/modules/protection_domain.py validate-modules:missing-gplv3-license
+plugins/modules/mdm_cluster.py validate-modules:missing-gplv3-license
+plugins/modules/replication_consistency_group.py validate-modules:missing-gplv3-license
+plugins/modules/replication_pair.py validate-modules:missing-gplv3-license
+plugins/modules/snapshot_policy.py validate-modules:missing-gplv3-license
+plugins/modules/fault_set.py validate-modules:missing-gplv3-license
+plugins/modules/resource_group.py validate-modules:missing-gplv3-license
diff --git a/ansible_collections/dellemc/unity/.github/workflows/ansible-test.yml b/ansible_collections/dellemc/unity/.github/workflows/ansible-test.yml
index 58d3ea030..ff8d181f8 100644
--- a/ansible_collections/dellemc/unity/.github/workflows/ansible-test.yml
+++ b/ansible_collections/dellemc/unity/.github/workflows/ansible-test.yml
@@ -2,9 +2,8 @@ name: CI
on:
push:
- branches: [ main ]
+ branches: [main]
pull_request:
- branches: [ main ]
schedule:
- cron: '0 3 * * *'
@@ -15,15 +14,15 @@ jobs:
strategy:
fail-fast: false
matrix:
- ansible-version: [stable-2.13]
+ ansible-version: [stable-2.14, stable-2.15, stable-2.16, devel]
steps:
- name: Check out code
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
- - name: Set up Python 3.9
- uses: actions/setup-python@v1
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v4
with:
- python-version: 3.9
+ python-version: 3.11
- name: Install ansible (${{ matrix.ansible-version }})
run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible-version }}.tar.gz --disable-pip-version-check
@@ -39,45 +38,36 @@ jobs:
###
# Unit tests (OPTIONAL)
- #
+ #
# https://docs.ansible.com/ansible/latest/dev_guide/testing_units.html
unit:
- name: Unit Tests
+ name: Unit Tests (Ⓐ${{ matrix.ansible }} with ${{ matrix.python }} python)
needs: [build]
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
- python-version: ["3.9", "3.10", "3.11"]
- ansible-version: [stable-2.13, stable-2.14, stable-2.15]
+ python: ['3.9', '3.10', '3.11']
+ ansible:
+ - stable-2.14
+ - stable-2.15
+ - stable-2.16
+ - devel
exclude:
- # Python 3.11 is supported only from ansible-core 2.14 onwards
- - python-version: "3.11"
- ansible-version: stable-2.13
+ - ansible: stable-2.16
+ python: '3.9'
+ - ansible: devel
+ python: '3.9'
steps:
- - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v1
+ - name: Perform unit testing with ansible-test
+ uses: ansible-community/ansible-test-gh-action@release/v1
with:
- python-version: ${{ matrix.python-version }}
-
- - name: Install ansible (${{ matrix.ansible-version }}) version
- run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible-version }}.tar.gz --disable-pip-version-check
-
- - name: Download migrated collection artifacts
- uses: actions/download-artifact@v1
- with:
- name: collection
- path: .cache/collection-tarballs
-
- - name: Setup Unit test Pre-requisites
- run: |
- ansible-galaxy collection install .cache/collection-tarballs/*.tar.gz
- if [ -f /home/runner/.ansible/collections/ansible_collections/dellemc/unity/tests/requirements.txt ]; then pip install -r /home/runner/.ansible/collections/ansible_collections/dellemc/unity/tests/requirements.txt; fi
- - name: Run Unit tests using ansible-test
- run: ansible-test units -v --color --python ${{ matrix.python-version }} --coverage
- working-directory: /home/runner/.ansible/collections/ansible_collections/dellemc/unity
+ testing-type: units
+ coverage: always
+ ansible-core-version: ${{ matrix.ansible }}
+ target-python-version: ${{ matrix.python }}
###
# Sanity tests (REQUIRED)
@@ -85,40 +75,31 @@ jobs:
# https://docs.ansible.com/ansible/latest/dev_guide/testing_sanity.html
sanity:
- name: Sanity Tests
- runs-on: ubuntu-latest
+ name: Sanity (Ⓐ${{ matrix.ansible }} with ${{ matrix.python }} python)
needs: [build]
strategy:
- fail-fast: false
matrix:
- ansible-version: [stable-2.13, stable-2.14, stable-2.15]
-
+ python: ['3.9', '3.10', '3.11']
+ ansible:
+ - stable-2.14
+ - stable-2.15
+ - stable-2.16
+ - devel
+ exclude:
+ - ansible: stable-2.16
+ python: '3.9'
+ - ansible: devel
+ python: '3.9'
+ runs-on: ubuntu-latest
steps:
- - name: Set up Python 3.9
- uses: actions/setup-python@v1
- with:
- # it is just required to run that once as "ansible-test sanity" in the docker image
- # will run on all python versions it supports.
- python-version: 3.9
-
- - name: Install ansible (${{ matrix.ansible-version }}) version
- run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible-version }}.tar.gz --disable-pip-version-check
-
- - name: Download migrated collection artifacts
- uses: actions/download-artifact@v1
+ - name: Perform sanity testing
+ uses: ansible-community/ansible-test-gh-action@release/v1
with:
- name: collection
- path: .cache/collection-tarballs
-
- - name: Setup Sanity test Pre-requisites
- run: ansible-galaxy collection install .cache/collection-tarballs/*.tar.gz
-
- # run ansible-test sanity inside of Docker.
- # The docker container has all the pinned dependencies that are required
- # and all python versions ansible supports.
- - name: Run sanity tests
- run: ansible-test sanity --docker -v --color
- working-directory: /home/runner/.ansible/collections/ansible_collections/dellemc/unity
+ ansible-core-version: ${{ matrix.ansible }}
+ target-python-version: ${{ matrix.python }}
+ testing-type: sanity
+ pull-request-change-detection: true
+ coverage: never
lint:
name: Ansible lint
@@ -128,7 +109,13 @@ jobs:
fail-fast: false
matrix:
python-version: ["3.9", "3.10", "3.11"]
- ansible-version: [stable-2.13, stable-2.14, stable-2.15]
+ ansible-version: [stable-2.14, stable-2.15, stable-2.16, devel]
+ exclude:
+ # Ansible-core 2.16 is supported only from Python 3.10 onwards
+ - python-version: "3.9"
+ ansible-version: stable-2.16
+ - python-version: '3.9'
+ ansible-version: devel
steps:
# Important: This sets up your GITHUB_WORKSPACE environment variable
@@ -138,7 +125,7 @@ jobs:
fetch-depth: 0 # needed for progressive mode to work
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v1
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
diff --git a/ansible_collections/dellemc/unity/CHANGELOG.rst b/ansible_collections/dellemc/unity/CHANGELOG.rst
index cf231b653..db0cba7c9 100644
--- a/ansible_collections/dellemc/unity/CHANGELOG.rst
+++ b/ansible_collections/dellemc/unity/CHANGELOG.rst
@@ -5,6 +5,14 @@ Dellemc.Unity Change Log
.. contents:: Topics
+v2.0.0
+======
+
+Major Changes
+-------------
+
+- Adding support for Unity Puffin v5.4.
+
v1.7.1
======
diff --git a/ansible_collections/dellemc/unity/FILES.json b/ansible_collections/dellemc/unity/FILES.json
index 24ae1cf93..41fadaabe 100644
--- a/ansible_collections/dellemc/unity/FILES.json
+++ b/ansible_collections/dellemc/unity/FILES.json
@@ -88,14 +88,14 @@
"name": ".github/workflows/ansible-test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d648c6b2038a891200af1f6ae981928a37427a14b230e0b7b6ba030cae29a37a",
+ "chksum_sha256": "8037200617d1da54d6765489808d93bdfec01ee19dbfb2501f75472b129cfdbd",
"format": 1
},
{
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0f7304d22c291fa4120f7f01ce4db2d000c00d422fd5fb2a4b2cc771e49c43f6",
+ "chksum_sha256": "49a0181850cb1896fb91301602356f71bac6229aedbfa49e39165cf2f1c6fc16",
"format": 1
},
{
@@ -116,7 +116,7 @@
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e9dda8bf38a7da066fc2fbfff79d3945479c31549df311b3d18229fb2e3634ed",
+ "chksum_sha256": "acc1e7f5a6d9b72232c26f27a06ecbf34043892d00be230bf3da54127e4a78f0",
"format": 1
},
{
@@ -130,14 +130,14 @@
"name": "changelogs/.plugin-cache.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4869399d305f2a50f7f0abe8a42823ecd1ca153957ed55d8b913bfda4b9dbfae",
+ "chksum_sha256": "72d1d5770f6286724c4779f5cb9834744592752ec99e45c61b0aa5999424664e",
"format": 1
},
{
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0c48d1c2f86f067385dfee3d1c7e4355b42325860936dd4395ecde150b7d894d",
+ "chksum_sha256": "3e57f1dd9c6bdcfaf29a2929b93a16dee2ec96969ae0e2982be0e5fb41ddc3bf",
"format": 1
},
{
@@ -186,21 +186,21 @@
"name": "docs/CONTRIBUTING.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4cf604f9b0b86445fa475876590076d016fa422d2b062e4242e6c4060e3b6738",
+ "chksum_sha256": "8d2a10e85fc5c68607e5083dcb778fcb660fe6b572c8bd31afdcce5c9bc6320a",
"format": 1
},
{
"name": "docs/INSTALLATION.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eb198be51142a91a0196541ff636aabbb68f17e34de329213a41c83ad3059923",
+ "chksum_sha256": "00f59d4b14622649360cef3f83f3aee72f4a0f46b7b650ae9b4a33dc66e4515c",
"format": 1
},
{
"name": "docs/ISSUE_TRIAGE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5a6b4bd9e4875696f16454844a45f7b2cf95102f1960c1a25cf5e2dafff3e14",
+ "chksum_sha256": "1348c93aaa1d3836b185e4c23aa184c413a65374323f11a8fe34efcc1d8573bb",
"format": 1
},
{
@@ -214,21 +214,21 @@
"name": "docs/MAINTAINER_GUIDE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9c3558b79f0913255880f5d2066b98dd2ca5e1e51bce28ccb3bf6cac390a23d7",
+ "chksum_sha256": "eeb2a871e29bec756dad05bab627baa40a897bfde5026b7f3d62b1af30d0972b",
"format": 1
},
{
"name": "docs/Release Notes.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3d01761e2b3a2260eeb24e776c773a89a37389156e7e4d43b9c77d24d0506afa",
+ "chksum_sha256": "9c615cde875875a36bc46715104048e6ccb4cb7b53a8bab1d61b90214d27ee71",
"format": 1
},
{
"name": "docs/SECURITY.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "11f2d0c94f6b4e19e25a683d25d7dda948f9c4c05bd23fc5e40eeaf23f84cf00",
+ "chksum_sha256": "894bd4282359004513506335feccdf9a793089a9c526715769709a066f8181b4",
"format": 1
},
{
@@ -389,7 +389,7 @@
"name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0d8f17122fc4d22811162d2eb588ef6ffdc292b62b1df6beea44e5f5fedad1d6",
+ "chksum_sha256": "6b62595744e5b1751e79f2f3b85307187267ed35fa9e07e51565977aceea730c",
"format": 1
},
{
@@ -550,7 +550,7 @@
"name": "plugins/doc_fragments/unity.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cf04d65ed0cf78f3979951670bace9f5930ede590cafc562bc7db97ce16238d9",
+ "chksum_sha256": "97effad53be187a731145b4a9c9677761cf7ff4a0cf161c79a24978e3979efc1",
"format": 1
},
{
@@ -613,21 +613,21 @@
"name": "plugins/modules/consistencygroup.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4db39611403cf3c3acd0290d26f7de37bf680a50d51b86106b1658060f9e3af3",
+ "chksum_sha256": "fb947e18d9f16759c9d70d798da599c3ab70657b5a607a976580d6ae18fd8385",
"format": 1
},
{
"name": "plugins/modules/filesystem.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c3f3ff2fd8bb07a600a25cecaf00caa533dd8d242903cdd24dc8c25381953d63",
+ "chksum_sha256": "9ac48b349e8a531bc2afef0738027e235a88258dd1fe425776156b84b19f5829",
"format": 1
},
{
"name": "plugins/modules/filesystem_snapshot.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c9c6eb9dbf17604409652740babf1bac714c487d56f78dae4fd5dbab88037cb2",
+ "chksum_sha256": "f1b23cb93d784f389f4cb77a8cdd87d0290306c61b81a33a19f2e135824fbb72",
"format": 1
},
{
@@ -641,35 +641,35 @@
"name": "plugins/modules/info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9653d4b095f23e5b1dfd53c5485611457bffe6807643f898635d4bc41c74630e",
+ "chksum_sha256": "43357b1c6078d84c8b62f99e79acec81bcbf5cabc42075999ada0becc858c50b",
"format": 1
},
{
"name": "plugins/modules/interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ed2d921df52b3a54e41f5279240402f290c9f4d5a7c36c4ccb442fb0b7bc0f02",
+ "chksum_sha256": "6535e43d4c858d48866f6e3c155eddcc05c477eef51e73ae1102639431e0fb66",
"format": 1
},
{
"name": "plugins/modules/nasserver.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c9909cc77062b9e43e54f2b62b3407412cc68915b5e7bc8f0b3726bec091b381",
+ "chksum_sha256": "17bf38bb733c7246c6dc65699d835795d06697f9284ad1fb0f6b0a8ea10da085",
"format": 1
},
{
"name": "plugins/modules/nfs.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "72d5eb3a6bed5969eb5e656bdf1965bce77a790c7e0f6909019ab493dd7cb08e",
+ "chksum_sha256": "07405be0249c332c2322c1c46e51b3bb48e8217f5be338487df4cc5bf8c5e8be",
"format": 1
},
{
"name": "plugins/modules/nfsserver.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4629fa9ca28f77bd3de962fe5ee226a814153bdce75d3d8c6089210217a4c3e2",
+ "chksum_sha256": "debebbe1ef1c98c722bb679353dd7e92b3e4a34954a3a8b32dad08b0df23755d",
"format": 1
},
{
@@ -690,42 +690,42 @@
"name": "plugins/modules/snapshot.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05f8a7b43e33347e1a71283c87f1af24a696836b52ffb271e96c601ca74d6ba4",
+ "chksum_sha256": "9a9dec578628d100f86732add0e2bd4be2405838c860204c7d1fa1e3f5c8a412",
"format": 1
},
{
"name": "plugins/modules/snapshotschedule.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6214c681ce55b24a89c8307e55d56f770665227d40929e918d906c20570a0c2d",
+ "chksum_sha256": "c9df424fece2c13b309104226532c1a93a9599d74a303edfb27b631d3fa8e800",
"format": 1
},
{
"name": "plugins/modules/storagepool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "da90ecfe49e95add45dd9b936905d5d8e3076ad3aab4416ec9823583ad1c4cd3",
+ "chksum_sha256": "af53c0decab1b392b20629063ac56a14ca33590fca39d1874ce443891ec9d1f3",
"format": 1
},
{
"name": "plugins/modules/tree_quota.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "494320b0e7cc55515bb85d9a39e20f4c01a8dfbafae9b3855e46ec3a3c98898b",
+ "chksum_sha256": "a482d31390d4bf1e3b5b9ec0292b117d048e6358188c83bfc57436023b545b7f",
"format": 1
},
{
"name": "plugins/modules/user_quota.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bd9b8bc4f0b76cea3e13d0ccf7ec7ac1f41ab3d73609d732c07720aac1df99b1",
+ "chksum_sha256": "9050b4578577fee994f237d3a51d1741eef8d6c8491b7e9f904f830804fba3dd",
"format": 1
},
{
"name": "plugins/modules/volume.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d93a6b6a055cbe33647c1386b2e9efdc86465c286a5a79b02e0370497a8b4b2b",
+ "chksum_sha256": "fee4db702477dd8aabe83516e4db9c23d1af7fcd25fc62bf22e8577e1348da88",
"format": 1
},
{
@@ -750,13 +750,6 @@
"format": 1
},
{
- "name": "tests/requirements.txt",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "65e6091d1c8d88a703555bd13590bb95248fb0b7376d3ed1d660e2b9d65581c8",
- "format": 1
- },
- {
"name": "tests/sanity",
"ftype": "dir",
"chksum_type": null,
@@ -764,24 +757,31 @@
"format": 1
},
{
- "name": "tests/sanity/ignore-2.13.txt",
+ "name": "tests/sanity/ignore-2.14.txt",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "8286d2f238aa5a2835bdd8a9ff38663a0e70b416a2b4a2971a54d75d76a349e7",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.14.txt",
+ "name": "tests/sanity/ignore-2.15.txt",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "8286d2f238aa5a2835bdd8a9ff38663a0e70b416a2b4a2971a54d75d76a349e7",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.15.txt",
+ "name": "tests/sanity/ignore-2.16.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8286d2f238aa5a2835bdd8a9ff38663a0e70b416a2b4a2971a54d75d76a349e7",
+ "chksum_sha256": "99746c6aa2cae22324034503cb91b384afa154aa8a5a14b89c9d0857efff1d28",
+ "format": 1
+ },
+ {
+ "name": "tests/sanity/ignore-2.17.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9df23cfd54d708a73ad687d70f1d124a54a162647ab4eccdee4ffe21a419d896",
"format": 1
},
{
@@ -993,6 +993,13 @@
"chksum_type": "sha256",
"chksum_sha256": "5bc48d2969cfaa5670ab538ba51cef532e1c3177004e2a2d6dbbd2cd7b4e7714",
"format": 1
+ },
+ {
+ "name": "tests/unit/requirements.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "65e6091d1c8d88a703555bd13590bb95248fb0b7376d3ed1d660e2b9d65581c8",
+ "format": 1
}
],
"format": 1
diff --git a/ansible_collections/dellemc/unity/MANIFEST.json b/ansible_collections/dellemc/unity/MANIFEST.json
index fbd2511d5..ad7d7f85d 100644
--- a/ansible_collections/dellemc/unity/MANIFEST.json
+++ b/ansible_collections/dellemc/unity/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "dellemc",
"name": "unity",
- "version": "1.7.1",
+ "version": "2.0.0",
"authors": [
"Akash Shendge <ansible.team@dell.com>",
"Ambuj Dubey <ansible.team@dell.com>",
@@ -25,16 +25,16 @@
],
"license_file": null,
"dependencies": {},
- "repository": "https://github.com/dell/ansible-unity/tree/1.7.1",
- "documentation": "https://github.com/dell/ansible-unity/tree/1.7.1/docs",
- "homepage": "https://github.com/dell/ansible-unity/tree/1.7.1",
+ "repository": "https://github.com/dell/ansible-unity/tree/2.0.0",
+ "documentation": "https://github.com/dell/ansible-unity/tree/2.0.0/docs",
+ "homepage": "https://github.com/dell/ansible-unity/tree/2.0.0",
"issues": "https://www.dell.com/community/Automation/bd-p/Automation"
},
"file_manifest_file": {
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3a6859700a30b9a90ae32f32e2e99b09a7289bf793832b1657a943ecdb8604d8",
+ "chksum_sha256": "0cde7569a5266f0469d36b79b9c3e2618cf770d1da7f06b2060bc529f28f59a9",
"format": 1
},
"format": 1
diff --git a/ansible_collections/dellemc/unity/README.md b/ansible_collections/dellemc/unity/README.md
index a50754721..19d5b1757 100644
--- a/ansible_collections/dellemc/unity/README.md
+++ b/ansible_collections/dellemc/unity/README.md
@@ -6,60 +6,60 @@ The capabilities of the Ansible modules are managing consistency groups, filesys
## Table of contents
-* [Code of conduct](https://github.com/dell/ansible-unity/blob/1.7.1/docs/CODE_OF_CONDUCT.md)
-* [Maintainer guide](https://github.com/dell/ansible-unity/blob/1.7.1/docs/MAINTAINER_GUIDE.md)
-* [Committer guide](https://github.com/dell/ansible-unity/blob/1.7.1/docs/COMMITTER_GUIDE.md)
-* [Contributing guide](https://github.com/dell/ansible-unity/blob/1.7.1/docs/CONTRIBUTING.md)
-* [Branching strategy](https://github.com/dell/ansible-unity/blob/1.7.1/docs/BRANCHING.md)
-* [List of adopters](https://github.com/dell/ansible-unity/blob/1.7.1/docs/ADOPTERS.md)
-* [Maintainers](https://github.com/dell/ansible-unity/blob/1.7.1/docs/MAINTAINERS.md)
-* [Support](https://github.com/dell/ansible-unity/blob/1.7.1/docs/SUPPORT.md)
+* [Code of conduct](https://github.com/dell/ansible-unity/blob/2.0.0/docs/CODE_OF_CONDUCT.md)
+* [Maintainer guide](https://github.com/dell/ansible-unity/blob/2.0.0/docs/MAINTAINER_GUIDE.md)
+* [Committer guide](https://github.com/dell/ansible-unity/blob/2.0.0/docs/COMMITTER_GUIDE.md)
+* [Contributing guide](https://github.com/dell/ansible-unity/blob/2.0.0/docs/CONTRIBUTING.md)
+* [Branching strategy](https://github.com/dell/ansible-unity/blob/2.0.0/docs/BRANCHING.md)
+* [List of adopters](https://github.com/dell/ansible-unity/blob/2.0.0/docs/ADOPTERS.md)
+* [Maintainers](https://github.com/dell/ansible-unity/blob/2.0.0/docs/MAINTAINERS.md)
+* [Support](https://github.com/dell/ansible-unity/blob/2.0.0/docs/SUPPORT.md)
* [License](#license)
-* [Security](https://github.com/dell/ansible-unity/blob/1.7.1/docs/SECURITY.md)
+* [Security](https://github.com/dell/ansible-unity/blob/2.0.0/docs/SECURITY.md)
* [Prerequisites](#prerequisites)
* [List of Ansible modules for Dell Unity](#list-of-ansible-modules-for-dell-unity)
* [Installation and execution of Ansible modules for Dell Unity](#installation-and-execution-of-ansible-modules-for-dell-unity)
* [Releasing, Maintenance and Deprecation](#releasing-maintenance-and-deprecation)
## License
-The Ansible collection for Unity is released and licensed under the GPL-3.0 license. See [LICENSE](https://github.com/dell/ansible-unity/blob/1.7.1/LICENSE) for the full terms. Ansible modules and module utilities that are part of the Ansible collection for Unity are released and licensed under the Apache 2.0 license. See [MODULE-LICENSE](https://github.com/dell/ansible-unity/blob/1.7.1/MODULE-LICENSE) for the full terms.
+The Ansible collection for Unity is released and licensed under the GPL-3.0 license. See [LICENSE](https://github.com/dell/ansible-unity/blob/2.0.0/LICENSE) for the full terms. Ansible modules and module utilities that are part of the Ansible collection for Unity are released and licensed under the Apache 2.0 license. See [MODULE-LICENSE](https://github.com/dell/ansible-unity/blob/2.0.0/MODULE-LICENSE) for the full terms.
## Supported Platforms
- * Dell Unity Arrays version 5.1, 5.2, 5.3
+ * Dell Unity Arrays version 5.2, 5.3, 5.4
## Prerequisites
This table provides information about the software prerequisites for the Ansible Modules for Dell Unity.
| **Ansible Modules** | **Python version** | **Storops - Python SDK version** | **Ansible** |
|---------------------|--------------------|----------------------------------|-------------|
-| v1.7.1 | 3.9 <br> 3.10 <br> 3.11 | 1.2.11 | 2.13 <br> 2.14 <br> 2.15|
+| v2.0.0 | 3.9 <br> 3.10 <br> 3.11 | 1.2.11 | 2.14 <br> 2.15 <br> 2.16|
## Idempotency
The modules are written in such a way that all requests are idempotent and hence fault-tolerant. It essentially means that the result of a successfully performed request is independent of the number of times it is executed.
## List of Ansible Modules for Dell Unity
- * [Consistency group module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/consistencygroup.rst)
- * [Filesystem module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/filesystem.rst)
- * [Filesystem snapshot module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/filesystem_snapshot.rst)
- * [Info module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/info.rst)
- * [Host module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/host.rst)
- * [CIFS server module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/cifsserver.rst)
- * [NAS server module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/nasserver.rst)
- * [NFS server module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/nfsserver.rst)
- * [NFS export module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/nfs.rst)
- * [SMB share module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/smbshare.rst)
- * [Interface module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/interface.rst)
- * [Snapshot module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/snapshot.rst)
- * [Snapshot schedule module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/snapshotschedule.rst)
- * [Storage pool module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/storagepool.rst)
- * [User quota module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/user_quota.rste)
- * [Quota tree module ](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/tree_quota.rst)
- * [Volume module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/volume.rst)
- * [Replication session module](https://github.com/dell/ansible-unity/blob/1.7.1/docs/modules/replication_session.rst)
+ * [Consistency group module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/consistencygroup.rst)
+ * [Filesystem module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/filesystem.rst)
+ * [Filesystem snapshot module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/filesystem_snapshot.rst)
+ * [Info module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/info.rst)
+ * [Host module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/host.rst)
+ * [CIFS server module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/cifsserver.rst)
+ * [NAS server module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/nasserver.rst)
+ * [NFS server module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/nfsserver.rst)
+ * [NFS export module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/nfs.rst)
+ * [SMB share module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/smbshare.rst)
+ * [Interface module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/interface.rst)
+ * [Snapshot module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/snapshot.rst)
+ * [Snapshot schedule module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/snapshotschedule.rst)
+ * [Storage pool module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/storagepool.rst)
+ * [User quota module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/user_quota.rst)
+ * [Quota tree module ](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/tree_quota.rst)
+ * [Volume module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/volume.rst)
+ * [Replication session module](https://github.com/dell/ansible-unity/blob/2.0.0/docs/modules/replication_session.rst)
## Installation and execution of Ansible modules for Dell Unity
-The installation and execution steps of Ansible modules for Dell Unity can be found [here](https://github.com/dell/ansible-unity/blob/1.7.1/docs/INSTALLATION.md).
+The installation and execution steps of Ansible modules for Dell Unity can be found [here](https://github.com/dell/ansible-unity/blob/2.0.0/docs/INSTALLATION.md).
## Releasing, Maintenance and Deprecation
@@ -67,6 +67,6 @@ Ansible Modules for Dell Technologies Unity follows [Semantic Versioning](https
New version will be release regularly if significant changes (bug fix or new feature) are made in the collection.
-Released code versions are located on "release" branches with names of the form "release-x.y.z" where x.y.z corresponds to the version number. More information on branching strategy followed can be found [here](https://github.com/dell/ansible-unity/blob/1.7.1/docs/BRANCHING.md).
+Released code versions are located on "release" branches with names of the form "release-x.y.z" where x.y.z corresponds to the version number. More information on branching strategy followed can be found [here](https://github.com/dell/ansible-unity/blob/2.0.0/docs/BRANCHING.md).
Ansible Modules for Dell Technologies Unity deprecation cycle is aligned with that of [Ansible](https://docs.ansible.com/ansible/latest/dev_guide/module_lifecycle.html).
diff --git a/ansible_collections/dellemc/unity/changelogs/.plugin-cache.yaml b/ansible_collections/dellemc/unity/changelogs/.plugin-cache.yaml
index 5e4c94e39..cbb44069c 100644
--- a/ansible_collections/dellemc/unity/changelogs/.plugin-cache.yaml
+++ b/ansible_collections/dellemc/unity/changelogs/.plugin-cache.yaml
@@ -106,4 +106,4 @@ plugins:
strategy: {}
test: {}
vars: {}
-version: 1.7.1
+version: 2.0.0
diff --git a/ansible_collections/dellemc/unity/changelogs/changelog.yaml b/ansible_collections/dellemc/unity/changelogs/changelog.yaml
index 6ab226ac6..719a979b6 100644
--- a/ansible_collections/dellemc/unity/changelogs/changelog.yaml
+++ b/ansible_collections/dellemc/unity/changelogs/changelog.yaml
@@ -174,3 +174,8 @@ releases:
minor_changes:
- Patch update to fix import errors in utils file.
release_date: '2023-07-31'
+ 2.0.0:
+ changes:
+ major_changes:
+ - Adding support for Unity Puffin v5.4.
+ release_date: '2024-03-29'
diff --git a/ansible_collections/dellemc/unity/docs/CONTRIBUTING.md b/ansible_collections/dellemc/unity/docs/CONTRIBUTING.md
index f26c1cd08..67199cd04 100644
--- a/ansible_collections/dellemc/unity/docs/CONTRIBUTING.md
+++ b/ansible_collections/dellemc/unity/docs/CONTRIBUTING.md
@@ -10,7 +10,7 @@ You may obtain a copy of the License at
# How to contribute
-Become one of the contributors to this project! We thrive to build a welcoming and open community for anyone who wants to use the project or contribute to it. There are just a few small guidelines you need to follow. To help us create a safe and positive community experience for all, we require all participants to adhere to the [Code of Conduct](https://github.com/dell/ansible-unity/blob/1.7.1/docs/CODE_OF_CONDUCT.md).
+Become one of the contributors to this project! We thrive to build a welcoming and open community for anyone who wants to use the project or contribute to it. There are just a few small guidelines you need to follow. To help us create a safe and positive community experience for all, we require all participants to adhere to the [Code of Conduct](https://github.com/dell/ansible-unity/blob/2.0.0/docs/CODE_OF_CONDUCT.md).
## Table of contents
@@ -76,7 +76,7 @@ Triage helps ensure that issues resolve quickly by:
If you don't have the knowledge or time to code, consider helping with _issue triage_. The Ansible modules for Dell Unity community will thank you for saving them time by spending some of yours.
-Read more about the ways you can [Triage issues](https://github.com/dell/ansible-unity/blob/1.7.1/docs/ISSUE_TRIAGE.md).
+Read more about the ways you can [Triage issues](https://github.com/dell/ansible-unity/blob/2.0.0/docs/ISSUE_TRIAGE.md).
## Your first contribution
@@ -89,7 +89,7 @@ When you're ready to contribute, it's time to create a pull request.
## Branching
-* [Branching Strategy for Ansible modules for Dell Unity](https://github.com/dell/ansible-unity/blob/1.7.1/docs/BRANCHING.md)
+* [Branching Strategy for Ansible modules for Dell Unity](https://github.com/dell/ansible-unity/blob/2.0.0/docs/BRANCHING.md)
## Signing your commits
@@ -144,7 +144,7 @@ Make sure that the title for your pull request uses the same format as the subje
### Quality gates for pull requests
-GitHub Actions are used to enforce quality gates when a pull request is created or when any commit is made to the pull request. These GitHub Actions enforce our minimum code quality requirement for any code that get checked into the repository. If any of the quality gates fail, it is expected that the contributor will look into the check log, understand the problem and resolve the issue. If help is needed, please feel free to reach out the maintainers of the project for [support](https://github.com/dell/ansible-unity/blob/1.7.1/docs/SUPPORT.md).
+GitHub Actions are used to enforce quality gates when a pull request is created or when any commit is made to the pull request. These GitHub Actions enforce our minimum code quality requirement for any code that get checked into the repository. If any of the quality gates fail, it is expected that the contributor will look into the check log, understand the problem and resolve the issue. If help is needed, please feel free to reach out the maintainers of the project for [support](https://github.com/dell/ansible-unity/blob/2.0.0/docs/SUPPORT.md).
#### Code sanitization
diff --git a/ansible_collections/dellemc/unity/docs/INSTALLATION.md b/ansible_collections/dellemc/unity/docs/INSTALLATION.md
index e361588f1..8ab4d1932 100644
--- a/ansible_collections/dellemc/unity/docs/INSTALLATION.md
+++ b/ansible_collections/dellemc/unity/docs/INSTALLATION.md
@@ -35,7 +35,7 @@ You may obtain a copy of the License at
* Download the latest tar build from any of the available distribution channel [Ansible Galaxy](https://galaxy.ansible.com/dellemc/unity) /[Automation Hub](https://console.redhat.com/ansible/automation-hub/repo/published/dellemc/unity) and use this command to install the collection anywhere in your system:
- ansible-galaxy collection install dellemc-unity-1.7.1.tar.gz -p <install_path>
+ ansible-galaxy collection install dellemc-unity-2.0.0.tar.gz -p <install_path>
* Set the environment variable:
@@ -62,7 +62,7 @@ You may obtain a copy of the License at
## Ansible modules execution
-The Ansible server must be configured with Python library for Unity to run the Ansible playbooks. The [Documents](https://github.com/dell/ansible-unity/blob/1.7.1/docs/) provide information on different Ansible modules along with their functions and syntax. The parameters table in the Product Guide provides information on various parameters which needs to be configured before running the modules.
+The Ansible server must be configured with Python library for Unity to run the Ansible playbooks. The [Documents](https://github.com/dell/ansible-unity/blob/2.0.0/docs/) provide information on different Ansible modules along with their functions and syntax. The parameters table in the Product Guide provides information on various parameters which needs to be configured before running the modules.
## SSL certificate validation
diff --git a/ansible_collections/dellemc/unity/docs/ISSUE_TRIAGE.md b/ansible_collections/dellemc/unity/docs/ISSUE_TRIAGE.md
index 2e25b256a..2d448a852 100644
--- a/ansible_collections/dellemc/unity/docs/ISSUE_TRIAGE.md
+++ b/ansible_collections/dellemc/unity/docs/ISSUE_TRIAGE.md
@@ -41,9 +41,9 @@ This section describes the various issue templates and the expected content.
Should explain what happened, what was expected and how to reproduce it together with any additional information that may help giving a complete picture of what happened such as screenshots, output and any environment related information that's applicable and/or maybe related to the reported problem:
- - Ansible Version: [e.g. 2.14]
+ - Ansible Version: [e.g. 2.16]
- Python Version [e.g. 3.10]
- - Ansible modules for Dell Unity Version: [e.g. 1.7.1]
+ - Ansible modules for Dell Unity Version: [e.g. 2.0.0]
- Unity SDK version: [e.g. Unity 1.2.11]
- Any other additional information...
diff --git a/ansible_collections/dellemc/unity/docs/MAINTAINER_GUIDE.md b/ansible_collections/dellemc/unity/docs/MAINTAINER_GUIDE.md
index a46a3d37d..c9687e1f4 100644
--- a/ansible_collections/dellemc/unity/docs/MAINTAINER_GUIDE.md
+++ b/ansible_collections/dellemc/unity/docs/MAINTAINER_GUIDE.md
@@ -27,7 +27,7 @@ If a candidate is approved, a Maintainer contacts the candidate to invite them t
## Maintainer policies
* Lead by example
-* Follow the [Code of Conduct](https://github.com/dell/ansible-unity/blob/1.7.1/docs/CODE_OF_CONDUCT.md) and the guidelines in the [Contributing](https://github.com/dell/ansible-unity/blob/1.7.1/docs/CONTRIBUTING.md) and [Committer](https://github.com/dell/ansible-unity/blob/1.7.1/docs/COMMITTER_GUIDE.md) guides
+* Follow the [Code of Conduct](https://github.com/dell/ansible-unity/blob/2.0.0/docs/CODE_OF_CONDUCT.md) and the guidelines in the [Contributing](https://github.com/dell/ansible-unity/blob/2.0.0/docs/CONTRIBUTING.md) and [Committer](https://github.com/dell/ansible-unity/blob/2.0.0/docs/COMMITTER_GUIDE.md) guides
* Promote a friendly and collaborative environment within our community
* Be actively engaged in discussions, answering questions, updating defects, and reviewing pull requests
* Criticize code, not people. Ideally, tell the contributor a better way to do what they need.
diff --git a/ansible_collections/dellemc/unity/docs/Release Notes.md b/ansible_collections/dellemc/unity/docs/Release Notes.md
index 4243667c1..966a00ffd 100644
--- a/ansible_collections/dellemc/unity/docs/Release Notes.md
+++ b/ansible_collections/dellemc/unity/docs/Release Notes.md
@@ -1,8 +1,8 @@
**Ansible Modules for Dell Technologies Unity**
=========================================
-### Release Notes 1.7.1
+### Release Notes 2.0.0
-> © 2022 Dell Inc. or its subsidiaries. All rights reserved. Dell
+> © 2024 Dell Inc. or its subsidiaries. All rights reserved. Dell
> and other trademarks are trademarks of Dell Inc. or its
> subsidiaries. Other trademarks may be trademarks of their respective
> owners.
@@ -28,7 +28,7 @@ Table 1. Revision history
| Revision | Date | Description |
|----------|----------------|---------------------------------------------------------|
-| 01 | July 2023 | Current release of Ansible Modules for Dell Unity 1.7.1 |
+| 01 | March 2024 | Current release of Ansible Modules for Dell Unity 2.0.0 |
Product Description
-------------------
@@ -38,9 +38,7 @@ New features & enhancements
---------------------------
This release has the following changes -
-- Support addition of host from the Host List to NFS Export in nfs module.
-- Support enable/disable advanced dedup in volume module.
-- Add synchronous replication support for filesystem.
+- Adding support for Unity Puffin v5.4.
Known issues
------------
@@ -71,7 +69,7 @@ for Unity GitHub](https://github.com/dell/ansible-unity/) page.
Documentation
-------------
-The documentation is available on [Ansible Modules for Unity GitHub](https://github.com/dell/ansible-unity/tree/1.7.1/docs)
+The documentation is available on [Ansible Modules for Unity GitHub](https://github.com/dell/ansible-unity/tree/2.0.0/docs)
page. It includes the following:
- README
- Release Notes (this document)
diff --git a/ansible_collections/dellemc/unity/docs/SECURITY.md b/ansible_collections/dellemc/unity/docs/SECURITY.md
index f77239eac..4388acc49 100644
--- a/ansible_collections/dellemc/unity/docs/SECURITY.md
+++ b/ansible_collections/dellemc/unity/docs/SECURITY.md
@@ -12,7 +12,7 @@ You may obtain a copy of the License at
The Ansible modules for Dell Unity repository are inspected for security vulnerabilities via blackduck scans and static code analysis.
-In addition to this, there are various security checks that get executed against a branch when a pull request is created/updated. Please refer to [pull request](https://github.com/dell/ansible-unity/blob/1.7.1/docs/CONTRIBUTING.md#Pull-requests) for more information.
+In addition to this, there are various security checks that get executed against a branch when a pull request is created/updated. Please refer to [pull request](https://github.com/dell/ansible-unity/blob/2.0.0/docs/CONTRIBUTING.md#Pull-requests) for more information.
## Reporting a vulnerability
diff --git a/ansible_collections/dellemc/unity/meta/runtime.yml b/ansible_collections/dellemc/unity/meta/runtime.yml
index 82e44c7f7..9f0f69044 100644
--- a/ansible_collections/dellemc/unity/meta/runtime.yml
+++ b/ansible_collections/dellemc/unity/meta/runtime.yml
@@ -1,79 +1,64 @@
---
-requires_ansible: ">=2.13"
+requires_ansible: ">=2.14"
plugin_routing:
modules:
dellemc_unity_info:
- redirect: dellemc.unity.info
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use info instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use info instead.
dellemc_unity_gatherfacts:
- redirect: dellemc.unity.info
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use info instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use info instead.
dellemc_unity_consistencygroup:
- redirect: dellemc.unity.consistencygroup
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use consistencygroup instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use consistencygroup instead.
dellemc_unity_filesystem_snapshot:
- redirect: dellemc.unity.filesystem_snapshot
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use filesystem_snapshot instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use filesystem_snapshot instead.
dellemc_unity_filesystem:
- redirect: dellemc.unity.filesystem
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use filesystem instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use filesystem instead.
dellemc_unity_host:
- redirect: dellemc.unity.host
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use host instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use host instead.
dellemc_unity_nasserver:
- redirect: dellemc.unity.nasserver
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use nasserver instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use nasserver instead.
dellemc_unity_nfs:
- redirect: dellemc.unity.nfs
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use nfs instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use nfs instead.
dellemc_unity_smbshare:
- redirect: dellemc.unity.smbshare
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use smbshare instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use smbshare instead.
dellemc_unity_snapshot:
- redirect: dellemc.unity.snapshot
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use snapshot instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use snapshot instead.
dellemc_unity_snapshotschedule:
- redirect: dellemc.unity.snapshotschedule
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use snapshotschedule instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use snapshotschedule instead.
dellemc_unity_storagepool:
- redirect: dellemc.unity.storagepool
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use storagepool instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use storagepool instead.
dellemc_unity_tree_quota:
- redirect: dellemc.unity.tree_quota
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use tree_quota instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use tree_quota instead.
dellemc_unity_user_quota:
- redirect: dellemc.unity.user_quota
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use user_quota instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use user_quota instead.
dellemc_unity_volume:
- redirect: dellemc.unity.volume
- deprecation:
- removal_date: "2024-03-31"
- warning_text: Use volume instead.
+ tombstone:
+ removal_date: "2024-03-22"
+ warning_text: Use volume instead.
diff --git a/ansible_collections/dellemc/unity/plugins/doc_fragments/unity.py b/ansible_collections/dellemc/unity/plugins/doc_fragments/unity.py
index 0df468567..a712a8037 100644
--- a/ansible_collections/dellemc/unity/plugins/doc_fragments/unity.py
+++ b/ansible_collections/dellemc/unity/plugins/doc_fragments/unity.py
@@ -1,4 +1,4 @@
-# Copyright: (c) 2020, Dell Technologies.
+# Copyright: (c) 2024, Dell Technologies.
# Apache License version 2.0 (see MODULE-LICENSE or http://www.apache.org/licenses/LICENSE-2.0.txt)
from __future__ import absolute_import, division, print_function
@@ -44,7 +44,7 @@ class ModuleDocFragment(object):
default: 443
requirements:
- A Dell Unity Storage device version 5.1 or later.
- - Ansible-core 2.13 or later.
+ - Ansible-core 2.14 or later.
- Python 3.9, 3.10 or 3.11.
- Storops Python SDK 1.2.11.
notes:
diff --git a/ansible_collections/dellemc/unity/plugins/modules/consistencygroup.py b/ansible_collections/dellemc/unity/plugins/modules/consistencygroup.py
index e0d6a6c06..c5c4b5599 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/consistencygroup.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/consistencygroup.py
@@ -267,8 +267,8 @@ EXAMPLES = r"""
validate_certs: "{{validate_certs}}"
cg_id: "{{cg_id}}"
hosts:
- - host_name: "10.226.198.248"
- - host_id: "Host_511"
+ - host_name: "10.226.198.248"
+ - host_id: "Host_511"
mapping_state: "mapped"
state: "present"
@@ -280,8 +280,8 @@ EXAMPLES = r"""
validate_certs: "{{validate_certs}}"
cg_id: "{{cg_id}}"
hosts:
- - host_id: "Host_511"
- - host_name: "10.226.198.248"
+ - host_id: "Host_511"
+ - host_name: "10.226.198.248"
mapping_state: "unmapped"
state: "present"
@@ -293,8 +293,8 @@ EXAMPLES = r"""
validate_certs: "{{validate_certs}}"
cg_name: "{{new_cg_name}}"
volumes:
- - vol_name: "Ansible_Test-3"
- - vol_id: "sv_1744"
+ - vol_name: "Ansible_Test-3"
+ - vol_id: "sv_1744"
vol_state: "{{vol_state_absent}}"
state: "present"
@@ -315,16 +315,16 @@ EXAMPLES = r"""
validate_certs: "{{validate_certs}}"
cg_id: "cg_id_1"
replication_params:
- destination_cg_name: "destination_cg_1"
- replication_mode: "asynchronous"
- rpo: 60
- replication_type: "remote"
- remote_system:
- remote_system_host: '10.1.2.3'
- remote_system_verifycert: false
- remote_system_username: 'username'
- remote_system_password: 'password'
- destination_pool_name: "pool_test_1"
+ destination_cg_name: "destination_cg_1"
+ replication_mode: "asynchronous"
+ rpo: 60
+ replication_type: "remote"
+ remote_system:
+ remote_system_host: '10.1.2.3'
+ remote_system_verifycert: false
+ remote_system_username: 'username'
+ remote_system_password: 'password'
+ destination_pool_name: "pool_test_1"
replication_state: "enable"
state: "present"
diff --git a/ansible_collections/dellemc/unity/plugins/modules/filesystem.py b/ansible_collections/dellemc/unity/plugins/modules/filesystem.py
index 95cffeec6..d8057ac4e 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/filesystem.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/filesystem.py
@@ -327,10 +327,10 @@ EXAMPLES = r"""
pool_name: "pool_1"
size: 5
quota_config:
- grace_period: 8
- grace_period_unit: "days"
- default_soft_limit: 10
- is_user_quota_enabled: false
+ grace_period: 8
+ grace_period_unit: "days"
+ default_soft_limit: 10
+ is_user_quota_enabled: false
state: "present"
- name: Expand FileSystem size
@@ -451,7 +451,7 @@ EXAMPLES = r"""
validate_certs: "{{validate_certs}}"
filesystem_id: "rs_405"
replication_params:
- replication_name: "test_replication"
+ replication_name: "test_replication"
replication_state: "disable"
state: "present"
"""
diff --git a/ansible_collections/dellemc/unity/plugins/modules/filesystem_snapshot.py b/ansible_collections/dellemc/unity/plugins/modules/filesystem_snapshot.py
index a82fbe89b..93258c84b 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/filesystem_snapshot.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/filesystem_snapshot.py
@@ -112,91 +112,91 @@ notes:
'''
EXAMPLES = r'''
- - name: Create Filesystem Snapshot
- dellemc.unity.filesystem_snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- snapshot_name: "ansible_test_FS_snap"
- filesystem_name: "ansible_test_FS"
- nas_server_name: "lglad069"
- description: "Created using playbook"
- auto_delete: true
- fs_access_type: "Protocol"
- state: "present"
-
- - name: Create Filesystem Snapshot with expiry time
- dellemc.unity.filesystem_snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- snapshot_name: "ansible_test_FS_snap_1"
- filesystem_name: "ansible_test_FS_1"
- nas_server_name: "lglad069"
- description: "Created using playbook"
- expiry_time: "04/15/2021 2:30"
- fs_access_type: "Protocol"
- state: "present"
-
- - name: Get Filesystem Snapshot Details using Name
- dellemc.unity.filesystem_snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- snapshot_name: "ansible_test_FS_snap"
- state: "present"
-
- - name: Get Filesystem Snapshot Details using ID
- dellemc.unity.filesystem_snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- snapshot_id: "10008000403"
- state: "present"
-
- - name: Update Filesystem Snapshot attributes
- dellemc.unity.filesystem_snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- snapshot_name: "ansible_test_FS_snap"
- description: "Description updated"
- auto_delete: false
- expiry_time: "04/15/2021 5:30"
- state: "present"
-
- - name: Update Filesystem Snapshot attributes using ID
- dellemc.unity.filesystem_snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- snapshot_id: "10008000403"
- expiry_time: "04/18/2021 8:30"
- state: "present"
-
- - name: Delete Filesystem Snapshot using Name
- dellemc.unity.filesystem_snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- snapshot_name: "ansible_test_FS_snap"
- state: "absent"
-
- - name: Delete Filesystem Snapshot using ID
- dellemc.unity.filesystem_snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- snapshot_id: "10008000403"
- state: "absent"
+- name: Create Filesystem Snapshot
+ dellemc.unity.filesystem_snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ snapshot_name: "ansible_test_FS_snap"
+ filesystem_name: "ansible_test_FS"
+ nas_server_name: "lglad069"
+ description: "Created using playbook"
+ auto_delete: true
+ fs_access_type: "Protocol"
+ state: "present"
+
+- name: Create Filesystem Snapshot with expiry time
+ dellemc.unity.filesystem_snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ snapshot_name: "ansible_test_FS_snap_1"
+ filesystem_name: "ansible_test_FS_1"
+ nas_server_name: "lglad069"
+ description: "Created using playbook"
+ expiry_time: "04/15/2021 2:30"
+ fs_access_type: "Protocol"
+ state: "present"
+
+- name: Get Filesystem Snapshot Details using Name
+ dellemc.unity.filesystem_snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ snapshot_name: "ansible_test_FS_snap"
+ state: "present"
+
+- name: Get Filesystem Snapshot Details using ID
+ dellemc.unity.filesystem_snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ snapshot_id: "10008000403"
+ state: "present"
+
+- name: Update Filesystem Snapshot attributes
+ dellemc.unity.filesystem_snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ snapshot_name: "ansible_test_FS_snap"
+ description: "Description updated"
+ auto_delete: false
+ expiry_time: "04/15/2021 5:30"
+ state: "present"
+
+- name: Update Filesystem Snapshot attributes using ID
+ dellemc.unity.filesystem_snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ snapshot_id: "10008000403"
+ expiry_time: "04/18/2021 8:30"
+ state: "present"
+
+- name: Delete Filesystem Snapshot using Name
+ dellemc.unity.filesystem_snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ snapshot_name: "ansible_test_FS_snap"
+ state: "absent"
+
+- name: Delete Filesystem Snapshot using ID
+ dellemc.unity.filesystem_snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ snapshot_id: "10008000403"
+ state: "absent"
'''
RETURN = r'''
diff --git a/ansible_collections/dellemc/unity/plugins/modules/info.py b/ansible_collections/dellemc/unity/plugins/modules/info.py
index 641074286..b924d674a 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/info.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/info.py
@@ -64,220 +64,220 @@ notes:
'''
EXAMPLES = r'''
- - name: Get detailed list of Unity entities
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - host
- - fc_initiator
- - iscsi_initiator
- - cg
- - storage_pool
- - vol
- - snapshot_schedule
- - nas_server
- - file_system
- - snapshot
- - nfs_export
- - smb_share
- - user_quota
- - tree_quota
- - disk_group
- - nfs_server
- - cifs_server
- - ethernet_port
- - file_interface
- - replication_session
-
- - name: Get information of Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
-
- - name: Get list of hosts on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - host
-
- - name: Get list of FC initiators on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - fc_initiator
-
- - name: Get list of ISCSI initiators on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - iscsi_initiator
-
- - name: Get list of consistency groups on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - cg
-
- - name: Get list of storage pools on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - storage_pool
-
- - name: Get list of volumes on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - vol
-
- - name: Get list of snapshot schedules on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - snapshot_schedule
-
- - name: Get list of NAS Servers on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - nas_server
-
- - name: Get list of File Systems on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - file_system
-
- - name: Get list of Snapshots on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - snapshot
-
- - name: Get list of NFS exports on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - nfs_export
-
- - name: Get list of SMB shares on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - smb_share
-
- - name: Get list of user quotas on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - user_quota
-
- - name: Get list of quota trees on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - tree_quota
-
- - name: Get list of disk groups on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - disk_group
-
- - name: Get list of NFS Servers on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - nfs_server
-
- - name: Get list of CIFS Servers on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - cifs_server
-
- - name: Get list of ethernet ports on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - ethernet_port
-
- - name: Get list of file interfaces on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - file_interface
-
- - name: Get list of replication sessions on Unity array
- dellemc.unity.info:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- gather_subset:
- - replication_session
+- name: Get detailed list of Unity entities
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - host
+ - fc_initiator
+ - iscsi_initiator
+ - cg
+ - storage_pool
+ - vol
+ - snapshot_schedule
+ - nas_server
+ - file_system
+ - snapshot
+ - nfs_export
+ - smb_share
+ - user_quota
+ - tree_quota
+ - disk_group
+ - nfs_server
+ - cifs_server
+ - ethernet_port
+ - file_interface
+ - replication_session
+
+- name: Get information of Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+
+- name: Get list of hosts on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - host
+
+- name: Get list of FC initiators on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - fc_initiator
+
+- name: Get list of ISCSI initiators on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - iscsi_initiator
+
+- name: Get list of consistency groups on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - cg
+
+- name: Get list of storage pools on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - storage_pool
+
+- name: Get list of volumes on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - vol
+
+- name: Get list of snapshot schedules on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - snapshot_schedule
+
+- name: Get list of NAS Servers on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - nas_server
+
+- name: Get list of File Systems on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - file_system
+
+- name: Get list of Snapshots on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - snapshot
+
+- name: Get list of NFS exports on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - nfs_export
+
+- name: Get list of SMB shares on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - smb_share
+
+- name: Get list of user quotas on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - user_quota
+
+- name: Get list of quota trees on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - tree_quota
+
+- name: Get list of disk groups on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - disk_group
+
+- name: Get list of NFS Servers on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - nfs_server
+
+- name: Get list of CIFS Servers on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - cifs_server
+
+- name: Get list of ethernet ports on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - ethernet_port
+
+- name: Get list of file interfaces on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - file_interface
+
+- name: Get list of replication sessions on Unity array
+ dellemc.unity.info:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ gather_subset:
+ - replication_session
'''
RETURN = r'''
diff --git a/ansible_collections/dellemc/unity/plugins/modules/interface.py b/ansible_collections/dellemc/unity/plugins/modules/interface.py
index 2523f940e..70be8021a 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/interface.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/interface.py
@@ -79,55 +79,55 @@ notes:
EXAMPLES = r'''
- - name: Add Interface as Backup to NAS Server
- dellemc.unity.interface:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "dummy_nas"
- ethernet_port_name: "SP A 4-Port Card Ethernet Port 0"
- role: "BACKUP"
- interface_ip: "xx.xx.xx.xx"
- netmask: "xx.xx.xx.xx"
- gateway: "xx.xx.xx.xx"
- vlan_id: 324
- state: "present"
-
- - name: Add Interface as Production to NAS Server
- dellemc.unity.interface:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "dummy_nas"
- ethernet_port_name: "SP A 4-Port Card Ethernet Port 0"
- role: "PRODUCTION"
- interface_ip: "xx.xx.xx.xx"
- netmask: "xx.xx.xx.xx"
- gateway: "xx.xx.xx.xx"
- vlan_id: 324
- state: "present"
-
- - name: Get interface details
- dellemc.unity.interface:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "dummy_nas"
- interface_ip: "xx.xx.xx.xx"
- state: "present"
-
- - name: Delete Interface
- dellemc.unity.interface:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "dummy_nas"
- interface_ip: "xx.xx.xx.xx"
- state: "absent"
+- name: Add Interface as Backup to NAS Server
+ dellemc.unity.interface:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "dummy_nas"
+ ethernet_port_name: "SP A 4-Port Card Ethernet Port 0"
+ role: "BACKUP"
+ interface_ip: "xx.xx.xx.xx"
+ netmask: "xx.xx.xx.xx"
+ gateway: "xx.xx.xx.xx"
+ vlan_id: 324
+ state: "present"
+
+- name: Add Interface as Production to NAS Server
+ dellemc.unity.interface:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "dummy_nas"
+ ethernet_port_name: "SP A 4-Port Card Ethernet Port 0"
+ role: "PRODUCTION"
+ interface_ip: "xx.xx.xx.xx"
+ netmask: "xx.xx.xx.xx"
+ gateway: "xx.xx.xx.xx"
+ vlan_id: 324
+ state: "present"
+
+- name: Get interface details
+ dellemc.unity.interface:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "dummy_nas"
+ interface_ip: "xx.xx.xx.xx"
+ state: "present"
+
+- name: Delete Interface
+ dellemc.unity.interface:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "dummy_nas"
+ interface_ip: "xx.xx.xx.xx"
+ state: "absent"
'''
RETURN = r'''
diff --git a/ansible_collections/dellemc/unity/plugins/modules/nasserver.py b/ansible_collections/dellemc/unity/plugins/modules/nasserver.py
index 925cc932e..446611f4e 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/nasserver.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/nasserver.py
@@ -208,138 +208,138 @@ notes:
EXAMPLES = r'''
- - name: Get Details of NAS Server
- dellemc.unity.nasserver:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "{{nas_server_name}}"
- state: "present"
-
- - name: Modify Details of NAS Server
- dellemc.unity.nasserver:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "{{nas_server_name}}"
- nas_server_new_name: "updated_sample_nas_server"
- is_replication_destination: false
- is_backup_only: false
- is_multiprotocol_enabled: true
- allow_unmapped_user: true
- default_unix_user: "default_unix_sample_user"
- default_windows_user: "default_windows_sample_user"
- enable_windows_to_unix_username_mapping: true
- current_unix_directory_service: "LDAP"
- is_packet_reflect_enabled: true
- state: "present"
-
- - name: Enable replication for NAS Server on Local System
- dellemc.unity.nasserver:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_id: "nas_10"
- replication_reuse_resource: false
- replication_params:
- replication_name: "test_replication"
- destination_nas_server_name: "destination_nas"
- replication_mode: "asynchronous"
- rpo: 60
- replication_type: "local"
- destination_pool_name: "Pool_Ansible_Neo_DND"
- destination_sp: "SPA"
- is_backup: true
- replication_state: "enable"
- state: "present"
-
- - name: Enable replication for NAS Server on Remote System
- dellemc.unity.nasserver:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "dummy_nas"
- replication_reuse_resource: false
- replication_params:
- replication_name: "test_replication"
- destination_nas_server_name: "destination_nas"
- replication_mode: "asynchronous"
- rpo: 60
- replication_type: "remote"
- remote_system:
- remote_system_host: '10.10.10.10'
- remote_system_verifycert: false
- remote_system_username: 'test1'
- remote_system_password: 'test1!'
- destination_pool_name: "fastVP_pool"
- destination_sp: "SPA"
- is_backup: true
- replication_state: "enable"
- state: "present"
-
- - name: Enable replication for NAS Server on Remote System in existing NAS Server
- dellemc.unity.nasserver:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "dummy_nas"
- replication_reuse_resource: true
- replication_params:
- destination_nas_server_name: "destination_nas"
- replication_mode: "asynchronous"
- rpo: 60
- replication_type: "remote"
- replication_name: "test_replication"
- remote_system:
- remote_system_host: '10.10.10.10'
- remote_system_verifycert: false
- remote_system_username: 'test1'
- remote_system_password: 'test1!'
- destination_pool_name: "fastVP_pool"
- replication_state: "enable"
- state: "present"
-
- - name: Modify replication on the nasserver
- dellemc.unity.nasserver:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "dummy_nas"
- replication_params:
- replication_name: "test_repl"
- new_replication_name: "test_repl_updated"
- replication_mode: "asynchronous"
- rpo: 50
- replication_state: "enable"
- state: "present"
-
- - name: Disable replication on the nasserver
- dellemc.unity.nasserver:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "dummy_nas"
- replication_state: "disable"
- state: "present"
-
- - name: Disable replication by specifying replication_name on the nasserver
- dellemc.unity.nasserver:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "dummy_nas"
- replication_params:
- replication_name: "test_replication"
- replication_state: "disable"
- state: "present"
+- name: Get Details of NAS Server
+ dellemc.unity.nasserver:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "{{nas_server_name}}"
+ state: "present"
+
+- name: Modify Details of NAS Server
+ dellemc.unity.nasserver:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "{{nas_server_name}}"
+ nas_server_new_name: "updated_sample_nas_server"
+ is_replication_destination: false
+ is_backup_only: false
+ is_multiprotocol_enabled: true
+ allow_unmapped_user: true
+ default_unix_user: "default_unix_sample_user"
+ default_windows_user: "default_windows_sample_user"
+ enable_windows_to_unix_username_mapping: true
+ current_unix_directory_service: "LDAP"
+ is_packet_reflect_enabled: true
+ state: "present"
+
+- name: Enable replication for NAS Server on Local System
+ dellemc.unity.nasserver:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_id: "nas_10"
+ replication_reuse_resource: false
+ replication_params:
+ replication_name: "test_replication"
+ destination_nas_server_name: "destination_nas"
+ replication_mode: "asynchronous"
+ rpo: 60
+ replication_type: "local"
+ destination_pool_name: "Pool_Ansible_Neo_DND"
+ destination_sp: "SPA"
+ is_backup: true
+ replication_state: "enable"
+ state: "present"
+
+- name: Enable replication for NAS Server on Remote System
+ dellemc.unity.nasserver:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "dummy_nas"
+ replication_reuse_resource: false
+ replication_params:
+ replication_name: "test_replication"
+ destination_nas_server_name: "destination_nas"
+ replication_mode: "asynchronous"
+ rpo: 60
+ replication_type: "remote"
+ remote_system:
+ remote_system_host: '10.10.10.10'
+ remote_system_verifycert: false
+ remote_system_username: 'test1'
+ remote_system_password: 'test1!'
+ destination_pool_name: "fastVP_pool"
+ destination_sp: "SPA"
+ is_backup: true
+ replication_state: "enable"
+ state: "present"
+
+- name: Enable replication for NAS Server on Remote System in existing NAS Server
+ dellemc.unity.nasserver:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "dummy_nas"
+ replication_reuse_resource: true
+ replication_params:
+ destination_nas_server_name: "destination_nas"
+ replication_mode: "asynchronous"
+ rpo: 60
+ replication_type: "remote"
+ replication_name: "test_replication"
+ remote_system:
+ remote_system_host: '10.10.10.10'
+ remote_system_verifycert: false
+ remote_system_username: 'test1'
+ remote_system_password: 'test1!'
+ destination_pool_name: "fastVP_pool"
+ replication_state: "enable"
+ state: "present"
+
+- name: Modify replication on the nasserver
+ dellemc.unity.nasserver:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "dummy_nas"
+ replication_params:
+ replication_name: "test_repl"
+ new_replication_name: "test_repl_updated"
+ replication_mode: "asynchronous"
+ rpo: 50
+ replication_state: "enable"
+ state: "present"
+
+- name: Disable replication on the nasserver
+ dellemc.unity.nasserver:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "dummy_nas"
+ replication_state: "disable"
+ state: "present"
+
+- name: Disable replication by specifying replication_name on the nasserver
+ dellemc.unity.nasserver:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "dummy_nas"
+ replication_params:
+ replication_name: "test_replication"
+ replication_state: "disable"
+ state: "present"
'''
RETURN = r'''
diff --git a/ansible_collections/dellemc/unity/plugins/modules/nfs.py b/ansible_collections/dellemc/unity/plugins/modules/nfs.py
index 473e40b2a..d0426f9cf 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/nfs.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/nfs.py
@@ -395,15 +395,15 @@ EXAMPLES = r"""
filesystem_id: "fs_377"
adv_host_mgmt_enabled: false
no_access_hosts:
- - domain: "google.com"
+ - domain: "google.com"
read_only_hosts:
- - netgroup: "netgroup_admin"
+ - netgroup: "netgroup_admin"
read_only_root_hosts:
- - host_name: "host5"
+ - host_name: "host5"
read_write_hosts:
- - subnet: "168.159.57.4/255.255.255.0"
+ - subnet: "168.159.57.4/255.255.255.0"
read_write_root_hosts:
- - ip_address: "10.255.2.4"
+ - ip_address: "10.255.2.4"
host_state: "present-in-export"
state: "present"
@@ -417,15 +417,15 @@ EXAMPLES = r"""
filesystem_id: "fs_377"
adv_host_mgmt_enabled: false
no_access_hosts:
- - domain: "google.com"
+ - domain: "google.com"
read_only_hosts:
- - netgroup: "netgroup_admin"
+ - netgroup: "netgroup_admin"
read_only_root_hosts:
- - host_name: "host5"
+ - host_name: "host5"
read_write_hosts:
- - subnet: "168.159.57.4/255.255.255.0"
+ - subnet: "168.159.57.4/255.255.255.0"
read_write_root_hosts:
- - ip_address: "10.255.2.4"
+ - ip_address: "10.255.2.4"
host_state: "absent-in-export"
state: "present"
diff --git a/ansible_collections/dellemc/unity/plugins/modules/nfsserver.py b/ansible_collections/dellemc/unity/plugins/modules/nfsserver.py
index 30d2c787f..90aba930e 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/nfsserver.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/nfsserver.py
@@ -84,56 +84,56 @@ notes:
EXAMPLES = r'''
- - name: Create NFS server with kdctype as Windows
- dellemc.unity.nfsserver:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "dummy_nas"
- host_name: "dummy_nas23"
- is_secure_enabled: true
- kerberos_domain_controller_type: "WINDOWS"
- kerberos_domain_controller_username: "administrator"
- kerberos_domain_controller_password: "Password123!"
- is_extended_credentials_enabled: true
- nfs_v4_enabled: true
- state: "present"
-
- - name: Create NFS server with kdctype as Unix
- dellemc.unity.nfsserver:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "dummy_nas"
- host_name: "dummy_nas23"
- is_secure_enabled: true
- kerberos_domain_controller_type: "UNIX"
- is_extended_credentials_enabled: true
- nfs_v4_enabled: true
- state: "present"
-
- - name: Get NFS server details
- dellemc.unity.nfsserver:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "dummy_nas"
- state: "present"
-
- - name: Delete NFS server
- dellemc.unity.nfsserver:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- nas_server_name: "dummy_nas"
- kerberos_domain_controller_username: "administrator"
- kerberos_domain_controller_password: "Password123!"
- unjoin_server_account: false
- state: "absent"
+- name: Create NFS server with kdctype as Windows
+ dellemc.unity.nfsserver:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "dummy_nas"
+ host_name: "dummy_nas23"
+ is_secure_enabled: true
+ kerberos_domain_controller_type: "WINDOWS"
+ kerberos_domain_controller_username: "administrator"
+ kerberos_domain_controller_password: "Password123!"
+ is_extended_credentials_enabled: true
+ nfs_v4_enabled: true
+ state: "present"
+
+- name: Create NFS server with kdctype as Unix
+ dellemc.unity.nfsserver:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "dummy_nas"
+ host_name: "dummy_nas23"
+ is_secure_enabled: true
+ kerberos_domain_controller_type: "UNIX"
+ is_extended_credentials_enabled: true
+ nfs_v4_enabled: true
+ state: "present"
+
+- name: Get NFS server details
+ dellemc.unity.nfsserver:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "dummy_nas"
+ state: "present"
+
+- name: Delete NFS server
+ dellemc.unity.nfsserver:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ nas_server_name: "dummy_nas"
+ kerberos_domain_controller_username: "administrator"
+ kerberos_domain_controller_password: "Password123!"
+ unjoin_server_account: false
+ state: "absent"
'''
RETURN = r'''
diff --git a/ansible_collections/dellemc/unity/plugins/modules/snapshot.py b/ansible_collections/dellemc/unity/plugins/modules/snapshot.py
index 5660e3c5c..efdd6e292 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/snapshot.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/snapshot.py
@@ -112,79 +112,79 @@ notes:
'''
EXAMPLES = r'''
- - name: Create a Snapshot for a CG
- dellemc.unity.snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- port: "{{port}}"
- cg_name: "{{cg_name}}"
- snapshot_name: "{{cg_snapshot_name}}"
- description: "{{description}}"
- auto_delete: false
- state: "present"
-
- - name: Create a Snapshot for a volume with Host attached
- dellemc.unity.snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- port: "{{port}}"
- vol_name: "{{vol_name}}"
- snapshot_name: "{{vol_snapshot_name}}"
- description: "{{description}}"
- expiry_time: "04/15/2025 16:30"
- host_name: "{{host_name}}"
- host_state: "mapped"
- state: "present"
-
- - name: Unmap a host for a Snapshot
- dellemc.unity.snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- port: "{{port}}"
- snapshot_name: "{{vol_snapshot_name}}"
- host_name: "{{host_name}}"
- host_state: "unmapped"
- state: "present"
-
- - name: Map snapshot to a host
- dellemc.unity.snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- port: "{{port}}"
- snapshot_name: "{{vol_snapshot_name}}"
- host_name: "{{host_name}}"
- host_state: "mapped"
- state: "present"
-
- - name: Update attributes of a Snapshot for a volume
- dellemc.unity.snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- snapshot_name: "{{vol_snapshot_name}}"
- new_snapshot_name: "{{new_snapshot_name}}"
- description: "{{new_description}}"
- host_name: "{{host_name}}"
- host_state: "unmapped"
- state: "present"
-
- - name: Delete Snapshot of CG
- dellemc.unity.snapshot:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- snapshot_name: "{{cg_snapshot_name}}"
- state: "absent"
+- name: Create a Snapshot for a CG
+ dellemc.unity.snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ port: "{{port}}"
+ cg_name: "{{cg_name}}"
+ snapshot_name: "{{cg_snapshot_name}}"
+ description: "{{description}}"
+ auto_delete: false
+ state: "present"
+
+- name: Create a Snapshot for a volume with Host attached
+ dellemc.unity.snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ port: "{{port}}"
+ vol_name: "{{vol_name}}"
+ snapshot_name: "{{vol_snapshot_name}}"
+ description: "{{description}}"
+ expiry_time: "04/15/2025 16:30"
+ host_name: "{{host_name}}"
+ host_state: "mapped"
+ state: "present"
+
+- name: Unmap a host for a Snapshot
+ dellemc.unity.snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ port: "{{port}}"
+ snapshot_name: "{{vol_snapshot_name}}"
+ host_name: "{{host_name}}"
+ host_state: "unmapped"
+ state: "present"
+
+- name: Map snapshot to a host
+ dellemc.unity.snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ port: "{{port}}"
+ snapshot_name: "{{vol_snapshot_name}}"
+ host_name: "{{host_name}}"
+ host_state: "mapped"
+ state: "present"
+
+- name: Update attributes of a Snapshot for a volume
+ dellemc.unity.snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ snapshot_name: "{{vol_snapshot_name}}"
+ new_snapshot_name: "{{new_snapshot_name}}"
+ description: "{{new_description}}"
+ host_name: "{{host_name}}"
+ host_state: "unmapped"
+ state: "present"
+
+- name: Delete Snapshot of CG
+ dellemc.unity.snapshot:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ snapshot_name: "{{cg_snapshot_name}}"
+ state: "absent"
'''
RETURN = r'''
diff --git a/ansible_collections/dellemc/unity/plugins/modules/snapshotschedule.py b/ansible_collections/dellemc/unity/plugins/modules/snapshotschedule.py
index 1d6e6ec6c..0690239ea 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/snapshotschedule.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/snapshotschedule.py
@@ -142,8 +142,8 @@ EXAMPLES = r"""
name: "Ansible_Every_Day_Testing"
type: "every_day"
hours_of_day:
- - 8
- - 14
+ - 8
+ - 14
auto_delete: true
state: "{{state_present}}"
@@ -169,8 +169,8 @@ EXAMPLES = r"""
name: "Ansible_Every_Week_Testing"
type: "every_week"
days_of_week:
- - MONDAY
- - FRIDAY
+ - MONDAY
+ - FRIDAY
hour: 12
minute: 30
desired_retention: 200
diff --git a/ansible_collections/dellemc/unity/plugins/modules/storagepool.py b/ansible_collections/dellemc/unity/plugins/modules/storagepool.py
index 6438e9c6a..b7b8bfd69 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/storagepool.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/storagepool.py
@@ -212,22 +212,21 @@ EXAMPLES = r'''
pool_name: "Test"
pool_description: "test pool"
raid_groups:
- disk_group_id : "dg_16"
- disk_num : 2
- raid_type : "RAID10"
- stripe_width : "BEST_FIT"
- alert_threshold : 50
- is_harvest_enabled : true
- pool_harvest_high_threshold : 60
- pool_harvest_low_threshold : 40
- is_snap_harvest_enabled : true
- snap_harvest_high_threshold : 70
- snap_harvest_low_threshold : 50
+ disk_group_id: "dg_16"
+ disk_num: 2
+ raid_type: "RAID10"
+ stripe_width: "BEST_FIT"
+ alert_threshold: 50
+ is_harvest_enabled: true
+ pool_harvest_high_threshold: 60
+ pool_harvest_low_threshold: 40
+ is_snap_harvest_enabled: true
+ snap_harvest_high_threshold: 70
+ snap_harvest_low_threshold: 50
fast_vp: "enabled"
fast_cache: "enabled"
- pool_type : "DYNAMIC"
+ pool_type: "DYNAMIC"
state: "present"
-
'''
RETURN = r'''
diff --git a/ansible_collections/dellemc/unity/plugins/modules/tree_quota.py b/ansible_collections/dellemc/unity/plugins/modules/tree_quota.py
index b066a01fa..dd0a76374 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/tree_quota.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/tree_quota.py
@@ -101,97 +101,97 @@ notes:
'''
EXAMPLES = r'''
- - name: Get quota tree details by quota tree id
- dellemc.unity.tree_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- tree_quota_id: "treequota_171798700679_10"
- state: "present"
-
- - name: Get quota tree details by quota tree path
- dellemc.unity.tree_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_name: "fs_2171"
- nas_server_id: "nas_21"
- path: "/test"
- state: "present"
-
- - name: Create quota tree for a filesystem with filesystem id
- dellemc.unity.tree_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_id: "fs_2171"
- hard_limit: 6
- cap_unit: "TB"
- soft_limit: 5
- path: "/test_new"
- state: "present"
-
- - name: Create quota tree for a filesystem with filesystem name
- dellemc.unity.tree_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_name: "Test_filesystem"
- nas_server_name: "lglad068"
- hard_limit: 6
- cap_unit: "TB"
- soft_limit: 5
- path: "/test_new"
- state: "present"
-
- - name: Modify quota tree limit usage by quota tree path
- dellemc.unity.tree_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- path: "/test_new"
- hard_limit: 10
- cap_unit: "TB"
- soft_limit: 8
- state: "present"
-
- - name: Modify quota tree by quota tree id
- dellemc.unity.tree_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_id: "fs_2171"
- tree_quota_id: "treequota_171798700679_10"
- hard_limit: 12
- cap_unit: "TB"
- soft_limit: 10
- state: "present"
-
- - name: Delete quota tree by quota tree id
- dellemc.unity.tree_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_id: "fs_2171"
- tree_quota_id: "treequota_171798700679_10"
- state: "absent"
-
- - name: Delete quota tree by path
- dellemc.unity.tree_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_id: "fs_2171"
- path: "/test_new"
- state: "absent"
+- name: Get quota tree details by quota tree id
+ dellemc.unity.tree_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ tree_quota_id: "treequota_171798700679_10"
+ state: "present"
+
+- name: Get quota tree details by quota tree path
+ dellemc.unity.tree_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_name: "fs_2171"
+ nas_server_id: "nas_21"
+ path: "/test"
+ state: "present"
+
+- name: Create quota tree for a filesystem with filesystem id
+ dellemc.unity.tree_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_id: "fs_2171"
+ hard_limit: 6
+ cap_unit: "TB"
+ soft_limit: 5
+ path: "/test_new"
+ state: "present"
+
+- name: Create quota tree for a filesystem with filesystem name
+ dellemc.unity.tree_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_name: "Test_filesystem"
+ nas_server_name: "lglad068"
+ hard_limit: 6
+ cap_unit: "TB"
+ soft_limit: 5
+ path: "/test_new"
+ state: "present"
+
+- name: Modify quota tree limit usage by quota tree path
+ dellemc.unity.tree_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ path: "/test_new"
+ hard_limit: 10
+ cap_unit: "TB"
+ soft_limit: 8
+ state: "present"
+
+- name: Modify quota tree by quota tree id
+ dellemc.unity.tree_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_id: "fs_2171"
+ tree_quota_id: "treequota_171798700679_10"
+ hard_limit: 12
+ cap_unit: "TB"
+ soft_limit: 10
+ state: "present"
+
+- name: Delete quota tree by quota tree id
+ dellemc.unity.tree_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_id: "fs_2171"
+ tree_quota_id: "treequota_171798700679_10"
+ state: "absent"
+
+- name: Delete quota tree by path
+ dellemc.unity.tree_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_id: "fs_2171"
+ path: "/test_new"
+ state: "absent"
'''
RETURN = r'''
diff --git a/ansible_collections/dellemc/unity/plugins/modules/user_quota.py b/ansible_collections/dellemc/unity/plugins/modules/user_quota.py
index 06413aa53..0a09ff44d 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/user_quota.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/user_quota.py
@@ -123,170 +123,170 @@ notes:
'''
EXAMPLES = r'''
- - name: Get user quota details by user quota id
- dellemc.unity.user_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- user_quota_id: "userquota_171798700679_0_123"
- state: "present"
-
- - name: Get user quota details by user quota uid/user name
- dellemc.unity.user_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_name: "fs_2171"
- nas_server_id: "nas_21"
- user_name: "test"
- state: "present"
-
- - name: Create user quota for a filesystem with filesystem id
- dellemc.unity.user_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_id: "fs_2171"
- hard_limit: 6
- cap_unit: "TB"
- soft_limit: 5
- uid: "111"
- state: "present"
-
- - name: Create user quota for a filesystem with filesystem name
- dellemc.unity.user_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_name: "Test_filesystem"
- nas_server_name: "lglad068"
- hard_limit: 6
- cap_unit: "TB"
- soft_limit: 5
- uid: "111"
- state: "present"
-
- - name: Modify user quota limit usage by user quota id
- dellemc.unity.user_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- user_quota_id: "userquota_171798700679_0_123"
- hard_limit: 10
- cap_unit: "TB"
- soft_limit: 8
- state: "present"
-
- - name: Modify user quota by filesystem id and user quota uid/user_name
- dellemc.unity.user_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_id: "fs_2171"
- user_type: "Windows"
- win_domain: "prod"
- user_name: "sample"
- hard_limit: 12
- cap_unit: "TB"
- soft_limit: 10
- state: "present"
-
- - name: Delete user quota
- dellemc.unity.user_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_id: "fs_2171"
- win_domain: "prod"
- user_name: "sample"
- state: "absent"
-
- - name: Create user quota of a quota tree
- dellemc.unity.user_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- tree_quota_id: "treequota_171798700679_4"
- user_type: "Windows"
- win_domain: "prod"
- user_name: "sample"
- soft_limit: 9
- cap_unit: "TB"
- state: "present"
-
- - name: Create user quota of a quota tree by quota tree path
- dellemc.unity.user_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_id: "fs_2171"
- path: "/sample"
- user_type: "Unix"
- user_name: "test"
- hard_limit: 2
- cap_unit: "TB"
- state: "present"
-
- - name: Modify user quota of a quota tree
- dellemc.unity.user_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- tree_quota_id: "treequota_171798700679_4"
- user_type: "Windows"
- win_domain: "prod"
- user_name: "sample"
- soft_limit: 10
- cap_unit: "TB"
- state: "present"
-
- - name: Modify user quota of a quota tree by quota tree path
- dellemc.unity.user_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_id: "fs_2171"
- path: "/sample"
- user_type: "Windows"
- win_domain: "prod"
- user_name: "sample"
- hard_limit: 12
- cap_unit: "TB"
- state: "present"
-
- - name: Delete user quota of a quota tree by quota tree path
- dellemc.unity.user_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- filesystem_id: "fs_2171"
- path: "/sample"
- win_domain: "prod"
- user_name: "sample"
- state: "absent"
-
- - name: Delete user quota of a quota tree by quota tree id
- dellemc.unity.user_quota:
- unispherehost: "{{unispherehost}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- tree_quota_id: "treequota_171798700679_4"
- win_domain: "prod"
- user_name: "sample"
- state: "absent"
+- name: Get user quota details by user quota id
+ dellemc.unity.user_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ user_quota_id: "userquota_171798700679_0_123"
+ state: "present"
+
+- name: Get user quota details by user quota uid/user name
+ dellemc.unity.user_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_name: "fs_2171"
+ nas_server_id: "nas_21"
+ user_name: "test"
+ state: "present"
+
+- name: Create user quota for a filesystem with filesystem id
+ dellemc.unity.user_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_id: "fs_2171"
+ hard_limit: 6
+ cap_unit: "TB"
+ soft_limit: 5
+ uid: "111"
+ state: "present"
+
+- name: Create user quota for a filesystem with filesystem name
+ dellemc.unity.user_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_name: "Test_filesystem"
+ nas_server_name: "lglad068"
+ hard_limit: 6
+ cap_unit: "TB"
+ soft_limit: 5
+ uid: "111"
+ state: "present"
+
+- name: Modify user quota limit usage by user quota id
+ dellemc.unity.user_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ user_quota_id: "userquota_171798700679_0_123"
+ hard_limit: 10
+ cap_unit: "TB"
+ soft_limit: 8
+ state: "present"
+
+- name: Modify user quota by filesystem id and user quota uid/user_name
+ dellemc.unity.user_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_id: "fs_2171"
+ user_type: "Windows"
+ win_domain: "prod"
+ user_name: "sample"
+ hard_limit: 12
+ cap_unit: "TB"
+ soft_limit: 10
+ state: "present"
+
+- name: Delete user quota
+ dellemc.unity.user_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_id: "fs_2171"
+ win_domain: "prod"
+ user_name: "sample"
+ state: "absent"
+
+- name: Create user quota of a quota tree
+ dellemc.unity.user_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ tree_quota_id: "treequota_171798700679_4"
+ user_type: "Windows"
+ win_domain: "prod"
+ user_name: "sample"
+ soft_limit: 9
+ cap_unit: "TB"
+ state: "present"
+
+- name: Create user quota of a quota tree by quota tree path
+ dellemc.unity.user_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_id: "fs_2171"
+ path: "/sample"
+ user_type: "Unix"
+ user_name: "test"
+ hard_limit: 2
+ cap_unit: "TB"
+ state: "present"
+
+- name: Modify user quota of a quota tree
+ dellemc.unity.user_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ tree_quota_id: "treequota_171798700679_4"
+ user_type: "Windows"
+ win_domain: "prod"
+ user_name: "sample"
+ soft_limit: 10
+ cap_unit: "TB"
+ state: "present"
+
+- name: Modify user quota of a quota tree by quota tree path
+ dellemc.unity.user_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_id: "fs_2171"
+ path: "/sample"
+ user_type: "Windows"
+ win_domain: "prod"
+ user_name: "sample"
+ hard_limit: 12
+ cap_unit: "TB"
+ state: "present"
+
+- name: Delete user quota of a quota tree by quota tree path
+ dellemc.unity.user_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ filesystem_id: "fs_2171"
+ path: "/sample"
+ win_domain: "prod"
+ user_name: "sample"
+ state: "absent"
+
+- name: Delete user quota of a quota tree by quota tree id
+ dellemc.unity.user_quota:
+ unispherehost: "{{unispherehost}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ validate_certs: "{{validate_certs}}"
+ tree_quota_id: "treequota_171798700679_4"
+ win_domain: "prod"
+ user_name: "sample"
+ state: "absent"
'''
RETURN = r'''
diff --git a/ansible_collections/dellemc/unity/plugins/modules/volume.py b/ansible_collections/dellemc/unity/plugins/modules/volume.py
index 81790ea24..4b23267ac 100644
--- a/ansible_collections/dellemc/unity/plugins/modules/volume.py
+++ b/ansible_collections/dellemc/unity/plugins/modules/volume.py
@@ -224,10 +224,10 @@ EXAMPLES = r"""
validate_certs: "{{validate_certs}}"
vol_id: "{{vol_id}}"
hosts:
- - host_name: "10.226.198.248"
- hlu: 1
- - host_id: "Host_929"
- hlu: 2
+ - host_name: "10.226.198.248"
+ hlu: 1
+ - host_id: "Host_929"
+ hlu: 2
mapping_state: "mapped"
state: "present"
diff --git a/ansible_collections/dellemc/unity/tests/sanity/ignore-2.13.txt b/ansible_collections/dellemc/unity/tests/sanity/ignore-2.16.txt
index a7aa13146..fcf3bcfba 100644
--- a/ansible_collections/dellemc/unity/tests/sanity/ignore-2.13.txt
+++ b/ansible_collections/dellemc/unity/tests/sanity/ignore-2.16.txt
@@ -17,16 +17,10 @@ plugins/modules/nfsserver.py validate-modules:missing-gplv3-license
plugins/modules/host.py import-2.7
plugins/modules/interface.py import-2.7
plugins/modules/nfs.py import-2.7
-plugins/modules/nfs.py import-3.5
plugins/modules/nfs.py compile-2.7
-plugins/modules/nfs.py compile-3.5
plugins/modules/filesystem.py compile-2.7
-plugins/modules/filesystem.py compile-3.5
plugins/modules/filesystem.py import-2.7
-plugins/modules/filesystem.py import-3.5
plugins/modules/interface.py validate-modules:missing-gplv3-license
plugins/modules/replication_session.py validate-modules:missing-gplv3-license
plugins/modules/replication_session.py import-2.7
-plugins/modules/replication_session.py import-3.5
plugins/modules/replication_session.py compile-2.7
-plugins/modules/replication_session.py compile-3.5
diff --git a/ansible_collections/dellemc/unity/tests/sanity/ignore-2.17.txt b/ansible_collections/dellemc/unity/tests/sanity/ignore-2.17.txt
new file mode 100644
index 000000000..c034ae364
--- /dev/null
+++ b/ansible_collections/dellemc/unity/tests/sanity/ignore-2.17.txt
@@ -0,0 +1,18 @@
+plugins/modules/consistencygroup.py validate-modules:missing-gplv3-license
+plugins/modules/filesystem.py validate-modules:missing-gplv3-license
+plugins/modules/filesystem_snapshot.py validate-modules:missing-gplv3-license
+plugins/modules/info.py validate-modules:missing-gplv3-license
+plugins/modules/host.py validate-modules:missing-gplv3-license
+plugins/modules/nasserver.py validate-modules:missing-gplv3-license
+plugins/modules/nfs.py validate-modules:missing-gplv3-license
+plugins/modules/smbshare.py validate-modules:missing-gplv3-license
+plugins/modules/snapshot.py validate-modules:missing-gplv3-license
+plugins/modules/snapshotschedule.py validate-modules:missing-gplv3-license
+plugins/modules/storagepool.py validate-modules:missing-gplv3-license
+plugins/modules/tree_quota.py validate-modules:missing-gplv3-license
+plugins/modules/user_quota.py validate-modules:missing-gplv3-license
+plugins/modules/volume.py validate-modules:missing-gplv3-license
+plugins/modules/cifsserver.py validate-modules:missing-gplv3-license
+plugins/modules/nfsserver.py validate-modules:missing-gplv3-license
+plugins/modules/interface.py validate-modules:missing-gplv3-license
+plugins/modules/replication_session.py validate-modules:missing-gplv3-license
diff --git a/ansible_collections/dellemc/unity/tests/requirements.txt b/ansible_collections/dellemc/unity/tests/unit/requirements.txt
index 3541acd15..3541acd15 100644
--- a/ansible_collections/dellemc/unity/tests/requirements.txt
+++ b/ansible_collections/dellemc/unity/tests/unit/requirements.txt