diff options
Diffstat (limited to 'ansible_collections/dellemc/openmanage/playbooks')
108 files changed, 7470 insertions, 0 deletions
diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/dellemc_idrac_storage_volume.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/dellemc_idrac_storage_volume.yml new file mode 100644 index 00000000..d8164065 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/dellemc_idrac_storage_volume.yml @@ -0,0 +1,110 @@ +--- +- hosts: idrac + connection: local + name: iDRAC storage volume configuration. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Create single volume. + dellemc_idrac_storage_volume: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + state: "create" + controller_id: "RAID.Slot.1-1" + volumes: + - drives: + location: [5] + tags: + - create_single_volume + + - name: Create multiple volume. + dellemc_idrac_storage_volume: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + raid_reset_config: "True" + state: "create" + controller_id: "RAID.Slot.1-1" + volume_type: "RAID 1" + span_depth: 1 + span_length: 2 + number_dedicated_hot_spare: 1 + disk_cache_policy: "Enabled" + write_cache_policy: "WriteBackForce" + read_cache_policy: "ReadAhead" + stripe_size: 65536 + capacity: 100 + raid_init_operation: "Fast" + volumes: + - name: "volume_1" + drives: + id: ["Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1", + "Disk.Bay.2:Enclosure.Internal.0-1:RAID.Slot.1-1"] + - name: "volume_2" + volume_type: "RAID 5" + span_length: 3 + span_depth: 1 + drives: + location: [7, 3, 5] + disk_cache_policy: "Disabled" + write_cache_policy: "WriteBack" + read_cache_policy: "NoReadAhead" + stripe_size: 131072 + capacity: "200" + raid_init_operation: "None" + tags: + - create_multiple_volume + + - name: Delete single volume. 
+ dellemc_idrac_storage_volume: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + state: "delete" + volumes: + - name: "volume_1" + tags: + - delete_single_volume + + + - name: Delete multiple volume. + dellemc_idrac_storage_volume: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + state: "delete" + volumes: + - name: "volume_1" + - name: "volume_2" + tags: + - delete_multiple_volume + + - name: View specific volume details. + dellemc_idrac_storage_volume: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + state: "view" + controller_id: "RAID.Slot.1-1" + volume_id: "Disk.Virtual.0:RAID.Slot.1-1" + tags: + - view_specific_volume + + - name: View all volume details. + dellemc_idrac_storage_volume: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + state: "view" + tags: + - view_all_volume
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_configure_idrac_eventing.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_configure_idrac_eventing.yml new file mode 100644 index 00000000..c712288e --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_configure_idrac_eventing.yml @@ -0,0 +1,62 @@ +--- +- hosts: idrac + connection: local + name: Configure the iDRAC eventing attributes + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Setup iDRAC SMTP + dellemc_configure_idrac_eventing: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + smtp_ip_address: "0.0.0.0" + authentication: "Enabled" + username: "test" + password: "test" + + tags: + - idrac_smtp + + - name: Setup iDRAC SNMP Trap + dellemc_configure_idrac_eventing: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + snmp_trap_state: "Enabled" + destination_number: "2" + snmp_v3_username: "None" + destination: "1.1.1.1" + + tags: + - idrac_snmptrap + + - name: Setup iDRAC Email Alerts + dellemc_configure_idrac_eventing: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + email_alert_state: "Disabled" + address: "test@test.com" + alert_number: "1" + custom_message: "test" + + tags: + - idrac_email_alerts + + - name: Setup iDRAC Alerts + dellemc_configure_idrac_eventing: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + enable_alerts: "Disabled" + + tags: + - idrac_alerts
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_configure_idrac_services.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_configure_idrac_services.yml new file mode 100644 index 00000000..e0d4bbe8 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_configure_idrac_services.yml @@ -0,0 +1,46 @@ +--- +- hosts: idrac + connection: local + name: Configure the iDRAC services attributes + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Setup iDRAC Webserver + dellemc_configure_idrac_services: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + ssl_encryption: "T_168_Bit_or_higher" + tls_protocol: "TLS_1_0_and_Higher" + + tags: + - idrac_webserver + + - name: Setup iDRAC SNMP + dellemc_configure_idrac_services: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + snmp_enable: "Enabled" + snmp_protocol: "All" + + tags: + - idrac_snmp + + - name: Setup iDRAC SNMP settings + dellemc_configure_idrac_services: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + ipmi_lan: + community_name: public + alert_port: 161 + trap_format: SNMPv3 + tags: + - idrac-snmp-settings diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_get_firmware_inventory.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_get_firmware_inventory.yml new file mode 100644 index 00000000..ac4736c5 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_get_firmware_inventory.yml @@ -0,0 +1,16 @@ +--- +- hosts: idrac + connection: local + name: Get Installed Firmware Inventory + 
gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Get Installed Firmware Inventory + dellemc_get_firmware_inventory: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_get_system_inventory.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_get_system_inventory.yml new file mode 100644 index 00000000..085b14bf --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_get_system_inventory.yml @@ -0,0 +1,16 @@ +--- +- hosts: idrac + connection: local + name: Get system inventory + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Get system inventory + dellemc_get_system_inventory: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_idrac_lc_attributes.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_idrac_lc_attributes.yml new file mode 100644 index 00000000..51a06ad1 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_idrac_lc_attributes.yml @@ -0,0 +1,17 @@ +--- +- hosts: idrac + connection: local + name: Configure iDRAC CSIOR Setting + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Configure iDRAC CSIOR Setting + dellemc_idrac_lc_attributes: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + csior: "Enabled" diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_system_lockdown_mode.yml 
b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_system_lockdown_mode.yml new file mode 100644 index 00000000..61260e3e --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/dellemc_system_lockdown_mode.yml @@ -0,0 +1,17 @@ +--- +- hosts: idrac + connection: local + name: Configure System lockdown mode + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Configure System lockdown mode + dellemc_system_lockdown_mode: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + lockdown_mode: "Disabled"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/idrac_network.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/idrac_network.yml new file mode 100644 index 00000000..9ee11728 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/idrac_network.yml @@ -0,0 +1,75 @@ +--- +- hosts: idrac + connection: local + name: Configure the iDRAC network attributes + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Register iDRAC on DNS + idrac_network: + idrac_ip: "{{idrac_ip}}" + idrac_user: "{{idrac_user}}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + register_idrac_on_dns: "Enabled" + dns_idrac_name: "idrac-3CZWCK2" + auto_config: "Enabled" + static_dns: "dell.com" + + tags: + - dns_register + + - name: Setup VLAN attributes + idrac_network: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + setup_idrac_nic_vlan: "Enabled" + + tags: + - setup_vlan + + - name: Setup iDRAC NIC + idrac_network: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + enable_nic: "Enabled" + nic_selection: "Dedicated" + failover_network: "T_None" + auto_detect: "Disabled" + auto_negotiation: "Enabled" + network_speed: "T_1000" + duplex_mode: "Full" + + tags: + - idrac_nic + + - name: Setup iDRAC IPv4 + idrac_network: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + enable_dhcp: "Enabled" + dns_from_dhcp: "Enabled" + enable_ipv4: "Enabled" + + tags: + - idrac_ipv4 + + - name: Setup iDRAC Static IPv4 + idrac_network: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + dns_from_dhcp: 
"Disabled" + + tags: + - idrac_staticipv4
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/idrac_timezone_ntp.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/idrac_timezone_ntp.yml new file mode 100644 index 00000000..c5fe7791 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/deprecated/idrac_timezone_ntp.yml @@ -0,0 +1,24 @@ +--- +- hosts: idrac + connection: local + name: Configure the iDRAC timezone attributes + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Setup iDRAC Timezone + idrac_timezone_ntp: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + setup_idrac_timezone: "Singapore" + enable_ntp: "Disabled" + ntp_server_1: "100.100.25.1" + ntp_server_2: "100.100.26.2" + ntp_server_3: "100.100.27.3" + + tags: + - idrac_timezone
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_attributes.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_attributes.yml new file mode 100644 index 00000000..9a362176 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_attributes.yml @@ -0,0 +1,155 @@ +--- +- hosts: idrac + connection: local + name: Dell OpenManage Ansible iDRAC Certificates management. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Update iDRAC attributes + idrac_attributes: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + idrac_attributes: + SNMP.1.AgentCommunity: Enabled + tags: idrac + + - name: Update System attributes + idrac_attributes: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + system_attributes: + ThermalSettings.1.ThermalProfile: Sound Cap + tags: system + + - name: Update Lifecycle Controller attributes + idrac_attributes: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + lifecycle_controller_attributes: + LCAttributes.1.AutoUpdate: Enabled + tags: lc + + - name: Configure the iDRAC attributes for email alert settings. + idrac_attributes: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + idrac_attributes: + EmailAlert.1.CustomMsg: Display Message + EmailAlert.1.Enable: Enabled + EmailAlert.1.Address: test@test.com + tags: email-alerts + + - name: Configure the iDRAC attributes for SNMP alert settings. 
+ idrac_attributes: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + idrac_attributes: + SNMPAlert.1.Destination: 192.168.0.2 + SNMPAlert.1.State: Enabled + SNMPAlert.1.SNMPv3Username: username + tags: snmp-alerts + + - name: Configure the iDRAC attributes for SMTP alert settings. + idrac_attributes: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + idrac_attributes: + RemoteHosts.1.SMTPServerIPAddress: 192.168.0.3 + RemoteHosts.1.SMTPAuthentication: Enabled + RemoteHosts.1.SMTPPort: 25 + RemoteHosts.1.SMTPUserName: username + RemoteHosts.1.SMTPPassword: password + tags: smtp-alerts + + - name: Configure the iDRAC attributes for webserver settings. + idrac_attributes: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + idrac_attributes: + WebServer.1.SSLEncryptionBitLength: 128-Bit or higher + WebServer.1.TLSProtocol: TLS 1.1 and Higher + tags: webserver-settings + + - name: Configure the iDRAC attributes for SNMP settings. + idrac_attributes: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + idrac_attributes: + SNMP.1.SNMPProtocol: All + SNMP.1.AgentEnable: Enabled + SNMP.1.TrapFormat: SNMPv1 + SNMP.1.AlertPort: 162 + SNMP.1.AgentCommunity: public + tags: snmp-settings + + - name: Configure the iDRAC LC attributes for collecting system inventory. + idrac_attributes: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + lifecycle_controller_attributes: + LCAttributes.1.CollectSystemInventoryOnRestart: Enabled + tags: collect-inventory + + - name: Configure the iDRAC system attributes for LCD settings. 
+ idrac_attributes: + idrac_ip: "192.168.0.1" + idrac_user: "user_name" + idrac_password: "user_password" + ca_path: "/path/to/ca_cert.pem" + system_attributes: + LCD.1.Configuration: Service Tag + LCD.1.vConsoleIndication: Enabled + LCD.1.FrontPanelLocking: Full-Access + LCD.1.UserDefinedString: custom lcd string + tags: lcd-config + + - name: Configure the iDRAC attributes for Timezone settings. + idrac_attributes: + idrac_ip: "192.168.0.1" + idrac_user: "user_name" + idrac_password: "user_password" + ca_path: "/path/to/ca_cert.pem" + idrac_attributes: + Time.1.TimeZone: CST6CDT + NTPConfigGroup.1.NTPEnable: Enabled + NTPConfigGroup.1.NTP1: 192.168.0.5 + NTPConfigGroup.1.NTP2: 192.168.0.6 + NTPConfigGroup.1.NTP3: 192.168.0.7 + tags: timezone-settings + + - name: Configure all attributes + dellemc.openmanage.idrac_attributes: + idrac_ip: "192.168.0.1" + idrac_user: "user_name" + idrac_password: "user_password" + ca_path: "/path/to/ca_cert.pem" + idrac_attributes: + SNMP.1.AgentCommunity: test + SNMP.1.AgentEnable: Enabled + SNMP.1.DiscoveryPort: 161 + system_attributes: + ServerOS.1.HostName: demohostname + lifecycle_controller_attributes: + LCAttributes.1.AutoUpdate: Disabled + tags: all-attributes diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_bios.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_bios.yml new file mode 100644 index 00000000..a541dce7 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_bios.yml @@ -0,0 +1,115 @@ +--- +- hosts: idrac + connection: local + name: Configure Boot Mode Setting + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Configure Bios Generic Attributes + idrac_bios: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + attributes: + BootMode: "Bios" + OneTimeBootMode: "Enabled" + BootSeqRetry: "Enabled" + tags: + - bootconfig + + - name: 
Configure PXE Generic Attributes + idrac_bios: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + attributes: + PxeDev1EnDis: "Enabled" + PxeDev1Protocol: "IPV4" + PxeDev1VlanEnDis: "Enabled" + PxeDev1VlanId: x + PxeDev1Interface: "NIC.Embedded.x-x-x" + PxeDev1VlanPriority: x + tags: + - pxeconfig + + - name: Configure attributes of the BIOS at Maintenance window + idrac_bios: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + apply_time: AtMaintenanceWindowStart + maintenance_window: + start_time: "2022-09-30T05:15:40-05:00" + duration: 600 + attributes: + BootMode: "Bios" + OneTimeBootMode: "Enabled" + BootSeqRetry: "Enabled" + tags: + - at_maintenance_start + + - name: Clear pending BIOS attributes + idrac_bios: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + clear_pending: yes + tags: + - clear_pending + + - name: Reset BIOS attributes to default settings. 
+ idrac_bios: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_pwd }}" + ca_path: "/path/to/ca_cert.pem" + reset_bios: yes + tags: + - reset_bios + + - name: Configure Boot Sources + idrac_bios: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + boot_sources: + - Name: "NIC.Integrated.x-x-x" + Enabled: true + Index: 1 + - Name: "NIC.Integrated.x-x-x" + Enabled: true + Index: 0 + tags: + - boot_sources + + - name: Configure Boot Sources - Enabled + idrac_bios: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + boot_sources: + - Name: "HardDisk.List.1-1" + Enabled: true + tags: + - boot_sources_enabled + + - name: Configure Boot Sources - Index + idrac_bios: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + boot_sources: + - Name: "NIC.Integrated.x-x-x" + Index: 1 + tags: + - boot_sources_index
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_boot.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_boot.yml new file mode 100644 index 00000000..22afb949 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_boot.yml @@ -0,0 +1,69 @@ +--- +- hosts: idrac + connection: local + name: Configure the boot order settings + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + + - name: Configure the system boot options settings. + idrac_boot: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + boot_options: + - display_name: Hard drive C + enabled: true + - boot_option_reference: NIC.PxeDevice.2-1 + enabled: true + tags: boot-option + + - name: Configure the boot order settings. + idrac_boot: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + boot_order: + - Boot0001 + - Boot0002 + - Boot0004 + - Boot0003 + tags: boot-order + + - name: Configure the boot source override mode. + idrac_boot: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + boot_source_override_mode: legacy + boot_source_override_target: cd + boot_source_override_enabled: once + tags: boot-mode + + - name: Configure the UEFI target settings. + idrac_boot: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + boot_source_override_mode: uefi + boot_source_override_target: uefi_target + uefi_target_boot_source_override: "VenHw(3A191845-5F86-4E78-8FCE-C4CFF59F9DAA)" + tags: uefi-target + + - name: Configure the boot source override mode as pxe. 
+ idrac_boot: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + boot_source_override_mode: legacy + boot_source_override_target: pxe + boot_source_override_enabled: continuous + tags: pxe-boot-mode diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_boot_virtual_media_workflow.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_boot_virtual_media_workflow.yml new file mode 100644 index 00000000..aa6d43ed --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_boot_virtual_media_workflow.yml @@ -0,0 +1,56 @@ +--- +- hosts: idrac + connection: local + name: Dell OpenManage Ansible iDRAC boot operations. + vars: + ansible_python_interpreter: /usr/bin/python3 + virtual_media_uri: "/redfish/v1/Managers/iDRAC.Embedded.1/VirtualMedia/CD/Actions/VirtualMedia.InsertMedia" + file_location: "192.168.0.1:/nfsshare/path/to/boot_image.iso" + nfs_dir: "192.168.0.1:/nfsshare" + iso_file: "boot_image.iso" + ca_path: "/path/to/ca_cert.pem" + boot_source_mode: "legacy" #other options are UEFI + + gather_facts: False + + tasks: + +# Mount the ISO image as a virtual media CD. + - name: "Insert virtual media" + ansible.builtin.uri: + url: "https://{{ idrac_ip }}{{ virtual_media_uri }}" + user: "{{ idrac_user }}" + password: "{{ idrac_password }}" + method: "POST" + body_format: json + body: + Image: "{{ file_location }}" + Inserted: true + WriteProtected: true + use_proxy: yes + status_code: 204 + return_content: no + ca_path: "{{ ca_path }}" + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + tags: + - virtual_media + - vm_boot + +# One-time boot with virtual media. + - name: Boot once from mounted CD. 
+ dellemc.openmanage.idrac_boot: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "{{ ca_path }}" + boot_source_override_mode: "{{ boot_source_mode }}" + boot_source_override_target: cd + boot_source_override_enabled: once + tags: + - boot_cd + - vm_boot + +# Eject the virtual media after boot. diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_certificates.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_certificates.yml new file mode 100644 index 00000000..801f12ed --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_certificates.yml @@ -0,0 +1,69 @@ +--- +- hosts: idrac + connection: local + name: Dell OpenManage Ansible iDRAC Certificates management. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Generate https signing request + idrac_certificates: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + command: "generate_csr" + certificate_type: "HTTPS" + certificate_path: "/home/omam/mycert_dir" + cert_params: + common_name: "sample.domain.com" + organization_unit: "OrgUnit" + locality_name: "Bangalore" + state_name: "Karnataka" + country_code: "IN" + email_address: "admin@domain.com" + organization_name: "OrgName" + subject_alt_name: + - 192.198.2.1 + + - name: Import a SSL certificate. + idrac_certificates: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + command: "import" + certificate_type: "HTTPS" + certificate_path: "/path/to/cert.pem" + + - name: Export a SSL certificate. 
+ idrac_certificates: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + command: "export" + certificate_type: "HTTPS" + certificate_path: "/home/omam/mycert_dir" + + - name: Import a CSC certificate. + idrac_certificates: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + command: "import" + certificate_type: "CSC" + certificate_file: "/path/to/cert.pem" + + - name: Export a Client trust certificate. + idrac_certificates: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + command: "export" + certificate_type: "CLIENT_TRUST_CERTIFICATE" + certificate_path: "/home/omam/mycert_dir"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_firmware.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_firmware.yml new file mode 100644 index 00000000..c1a2c891 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_firmware.yml @@ -0,0 +1,69 @@ +--- +- hosts: idrac + connection: local + name: Update Firmware Inventory + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Update firmware from repository on a HTTP/HTTP/FTP repository + idrac_firmware: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "https://downloads.dell.com" + reboot: True + job_wait: True + apply_update: True + + - name: Update firmware from repository on a internally hosted HTTP repository. + idrac_firmware: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password}}" + ca_path: "/path/to/ca_cert.pem" + share_name: "http://192.168.0.1/path_to_folder/" + reboot: True + job_wait: True + apply_update: True + catalog_file_name: "Catalog.xml" + + - name: Update firmware from repository on a NFS Share + idrac_firmware: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password}}" + ca_path: "/path/to/ca_cert.pem" + share_name: "192.168.0.1:/complete_share_path" + reboot: True + job_wait: True + apply_update: True + catalog_file_name: "Catalog.xml" + + - name: Update firmware from repository on a CIFS Share + idrac_firmware: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password}}" + ca_path: "/path/to/ca_cert.pem" + share_name: "\\\\192.168.0.1\\share_path" + share_user: "{{ share_user }}" + share_password: "{{ share_password }}" + share_mnt: "/mnt/cifs_share" + reboot: False + job_wait: True + catalog_file_name: "Catalog.xml" + + - name: Firmware 
compliance report using HTTPS repository. + idrac_firmare: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "https://downloads.dell.com" + reboot: False + job_wait: True + apply_update: False diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_firmware_info.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_firmware_info.yml new file mode 100644 index 00000000..aaca53a5 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_firmware_info.yml @@ -0,0 +1,16 @@ +--- +- hosts: idrac + connection: local + name: Get Installed Firmware Inventory + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Get Installed Firmware Inventory. + idrac_firmware_info: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_lifecycle_controller_job_status_info.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_lifecycle_controller_job_status_info.yml new file mode 100644 index 00000000..9f0f61de --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_lifecycle_controller_job_status_info.yml @@ -0,0 +1,17 @@ +--- +- hosts: idrac + connection: local + name: Get LC job Status + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Get LC job Status + idrac_lifecycle_controller_job_status_info: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: "JID_844222910040" diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_lifecycle_controller_jobs.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_lifecycle_controller_jobs.yml new file mode 100644 index 
00000000..495e84a6 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_lifecycle_controller_jobs.yml @@ -0,0 +1,28 @@ +--- +- hosts: idrac + connection: local + name: Delete LC job + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Delete LC job Queue + idrac_lifecycle_controller_jobs: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + tags: + - delete_all_jobs + + - name: Delete a LC job + idrac_lifecycle_controller_jobs: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: "JID_123456789" + tags: + - delete_job
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_lifecycle_controller_logs.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_lifecycle_controller_logs.yml new file mode 100644 index 00000000..99c9d0ce --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_lifecycle_controller_logs.yml @@ -0,0 +1,18 @@ +--- +- hosts: idrac + connection: local + name: Export Lifecycle Controller Logs + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Export Lifecycle Controller Logs + idrac_lifecycle_controller_logs: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "{{ playbook_dir }}" + job_wait: "True"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_lifecycle_controller_status_info.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_lifecycle_controller_status_info.yml new file mode 100644 index 00000000..1798ab99 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_lifecycle_controller_status_info.yml @@ -0,0 +1,16 @@ +--- +- hosts: idrac + connection: local + name: Check LC Ready Status + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Check LC Ready Status + idrac_lifecycle_controller_status_info: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_os_deployment.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_os_deployment.yml new file mode 100644 index 00000000..3ad52adc --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_os_deployment.yml @@ -0,0 +1,22 @@ +--- +- hosts: idrac + connection: local + gather_facts: false + name: Booting to Network Operating System image + + collections: + - dellemc.openmanage + + tasks: + - name: "Booting to Network Operating System image" + idrac_os_deployment: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "{{ playbook_dir }}" + iso_image: "uninterrupted_os_installation_image.iso." + expose_duration: 180 + + tags: + - network_iso
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_redfish_storage_controller.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_redfish_storage_controller.yml new file mode 100644 index 00000000..2cb44788 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_redfish_storage_controller.yml @@ -0,0 +1,216 @@ +--- +- hosts: idrac + connection: local + name: Dell OpenManage Ansible iDRAC Redfish Storage Controller service. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Assign dedicated hot spare. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + volume_id: + - "Disk.Virtual.0:RAID.Slot.1-1" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - assign_dedicated_hot_spare + + - name: Assign global hot spare. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - assign_global_hot_spare + + - name: Unassign hot spare + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + command: UnassignSpare + tags: + - un-assign-hot-spare + + - name: Set controller encryption key. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "SetControllerKey" + controller_id: "RAID.Slot.1-1" + key: "PassPhrase@123" + key_id: "mykeyid123" + tags: + - set_controller_key + + - name: Rekey in LKM mode. 
+ idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ReKey" + controller_id: "RAID.Slot.1-1" + key: "NewPassPhrase@123" + key_id: "newkeyid123" + old_key: "OldPassPhrase@123" + tags: + - rekey_lkm + + - name: Rekey in SEKM mode. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ReKey" + controller_id: "RAID.Slot.1-1" + mode: "SEKM" + tags: + - rekey_sekm + + - name: Remove controller key. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "RemoveControllerKey" + controller_id: "RAID.Slot.1-1" + tags: + - remove_controller_key + + - name: Reset controller configuration. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ResetConfig" + controller_id: "RAID.Slot.1-1" + tags: + - reset_config + + - name: Enable controller encryption + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "EnableControllerEncryption" + controller_id: "RAID.Slot.1-1" + mode: "LKM" + key: "your_Key@123" + key_id: "your_Keyid@123" + tags: + - enable-encrypt + + - name: Blink physical disk. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "BlinkTarget" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - blink-target + + - name: Blink virtual drive. 
+ idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "BlinkTarget" + volume_id: "Disk.Virtual.0:RAID.Slot.1-1" + tags: + - blink-volume + + - name: Unblink physical disk. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "UnBlinkTarget" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - unblink-target + + - name: Unblink virtual drive. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "UnBlinkTarget" + volume_id: "Disk.Virtual.0:RAID.Slot.1-1" + tags: + - unblink-drive + + - name: Convert physical disk to RAID + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ConvertToRAID" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - convert-raid + + - name: Convert physical disk to non-RAID + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ConvertToNonRAID" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - convert-non-raid + + - name: Change physical disk state to online. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ChangePDStateToOnline" + target: "Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - pd-state-online + + - name: Change physical disk state to offline. 
+ idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ChangePDStateToOnline" + target: "Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1" + tags: + - pd-state-offline + + - name: Lock virtual drive + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "LockVirtualDisk" + volume_id: "Disk.Virtual.0:RAID.SL.3-1" + tags: + - lock diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_redfish_storage_controller_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_redfish_storage_controller_job_tracking.yml new file mode 100644 index 00000000..d61112f0 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_redfish_storage_controller_job_tracking.yml @@ -0,0 +1,138 @@ +--- +- hosts: idrac + connection: local + name: iDRAC Redfish storage controller service with job tracking. + gather_facts: False + vars: + retries_count: 100 + polling_interval: 10 + all_ctrl_task_tags: + - assign_dedicated_hot_spare + - assign_global_hot_spare + - set_controller_key + - rekey_lkm + - rekey_sekm + - remove_controller_key + - reset_config + + collections: + - dellemc.openmanage + +# Use a single tag to run each task with job tracker + tasks: + - name: Assign dedicated hot spare. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + volume_id: + - "Disk.Virtual.0:RAID.Slot.1-1" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + register: result + tags: + - assign_dedicated_hot_spare + + - name: Assign global hot spare. 
+ idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1" + register: result + tags: + - assign_global_hot_spare + + - name: Set controller encryption key. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "SetControllerKey" + controller_id: "RAID.Slot.1-1" + key: "PassPhrase@123" + key_id: "mykeyid123" + register: result + tags: + - set_controller_key + + - name: Rekey in LKM mode. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ReKey" + controller_id: "RAID.Slot.1-1" + key: "NewPassPhrase@123" + key_id: "newkeyid123" + old_key: "OldPassPhrase@123" + register: result + tags: + - rekey_lkm + + - name: Rekey in SEKM mode. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ReKey" + controller_id: "RAID.Slot.1-1" + mode: "SEKM" + register: result + tags: + - rekey_sekm + + - name: Remove controller key. + idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "RemoveControllerKey" + controller_id: "RAID.Slot.1-1" + register: result + tags: + - remove_controller_key + + - name: Reset controller configuration. 
+ idrac_redfish_storage_controller: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "ResetConfig" + controller_id: "RAID.Slot.1-1" + register: result + tags: + - reset_config + + - name: "iDRAC Job tracking" + uri: + url: "https://{{ baseuri }}{{ result.task.uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200, 202 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: result + until: result.json.JobState == 'Completed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" + tags: "{{ all_ctrl_task_tags }}" + + - name: "iDRAC job result." + set_fact: + job_details: "{{ result.json }}" + failed_when: result.json.Message == "Failed" + changed_when: result.json.Message != "Failed" + tags: "{{ all_ctrl_task_tags }}"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_reset.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_reset.yml new file mode 100644 index 00000000..209befd2 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_reset.yml @@ -0,0 +1,19 @@ +--- +- hosts: idrac + connection: local + name: Reset iDRAC + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Reset iDRAC + idrac_reset: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + + tags: + - idrac_reset
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_reset_result_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_reset_result_tracking.yml new file mode 100644 index 00000000..534b2227 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_reset_result_tracking.yml @@ -0,0 +1,39 @@ +--- +- hosts: idrac + connection: local + name: Reset iDRAC + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Reset iDRAC + idrac_reset: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + register: result + failed_when: result is changed + + - name: Wait for port 443 to become open on the host + wait_for: + host: "{{idrac_ip}}" + port: 443 + delay: 30 + connect_timeout: 5 + timeout: 500 + register: result + failed_when: result.elapsed < 20 + + - name: Get LC status. + idrac_lifecycle_controller_status_info: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + register: result + until: result.msg.LCStatus == 'Ready' or result.msg.LCReady is true + retries: 30 + delay: 10 diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_server_config_profile.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_server_config_profile.yml new file mode 100644 index 00000000..0d61f54c --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_server_config_profile.yml @@ -0,0 +1,220 @@ +--- +- hosts: idrac + connection: local + name: Server Configuration Profile + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + + - name: Export SCP with IDRAC components in JSON format to a local path + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ 
idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "/scp_folder" + scp_components: IDRAC + scp_file: example_file + export_format: JSON + export_use: Clone + job_wait: True + tags: export-scp-local + + - name: Import SCP with IDRAC components in JSON format from a local path + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "/scp_folder" + command: import + scp_components: "IDRAC" + scp_file: example_file.json + shutdown_type: Graceful + end_host_power_state: "On" + job_wait: False + tags: import-scp-local + + - name: Export SCP with BIOS components in XML format to a NFS share path with auto-generated file name + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "192.168.0.2:/share" + scp_components: "BIOS" + export_format: XML + export_use: Default + job_wait: True + tags: export-scp-nfs + + - name: Import SCP with BIOS components in XML format from a NFS share path + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "192.168.0.2:/share" + command: import + scp_components: "BIOS" + scp_file: 192.168.0.1_20210618_162856.xml + shutdown_type: NoReboot + end_host_power_state: "Off" + job_wait: False + tags: import-scp-nfs + + - name: Export SCP with RAID components in XML format to a CIFS share path with share user domain name + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "\\\\192.168.0.2\\share" + share_user: share_username@domain + share_password: 
share_password + share_mnt: /mnt/cifs + scp_file: example_file.xml + scp_components: "RAID" + export_format: XML + export_use: Default + job_wait: True + tags: export-scp-cifs + + - name: Import SCP with RAID components in XML format from a CIFS share path + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "\\\\192.168.0.2\\share" + share_user: share_username + share_password: share_password + share_mnt: /mnt/cifs + command: import + scp_components: "RAID" + scp_file: example_file.xml + shutdown_type: Forced + end_host_power_state: "On" + job_wait: True + tags: import-scp-cifs + + - name: Export SCP with ALL components in JSON format to a HTTP share path + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "http://192.168.0.3/share" + share_user: share_username + share_password: share_password + scp_file: example_file.json + scp_components: ALL + export_format: JSON + job_wait: False + tags: export-scp-http + + - name: Import SCP with ALL components in JSON format from a HTTP share path + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + command: import + share_name: "http://192.168.0.3/share" + share_user: share_username + share_password: share_password + scp_file: example_file.json + shutdown_type: Graceful + end_host_power_state: "On" + job_wait: True + tags: import-scp-http + + - name: Export SCP with ALL components in XML format to a HTTPS share path without SCP file name + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: 
"/path/to/ca_cert.pem" + share_name: "https://192.168.0.4/share" + share_user: share_username + share_password: share_password + scp_components: ALL + export_format: XML + export_use: Replace + job_wait: True + tags: export-scp-https + + - name: Import SCP with ALL components in XML format from a HTTPS share path + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + command: import + share_name: "https://192.168.0.4/share" + share_user: share_username + share_password: share_password + scp_file: 192.168.0.1_20160618_164647.xml + shutdown_type: Graceful + end_host_power_state: "On" + job_wait: False + tags: import-scp-https + + - name: Preview SCP with ALL components in XML format from a CIFS share path + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "\\\\192.168.0.2\\share" + share_user: share_username + share_password: share_password + command: preview + scp_components: "ALL" + scp_file: example_file.xml + job_wait: True + tags: preview-scp-cifs + + - name: Preview SCP with ALL components in JSON format from a NFS share path + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "192.168.0.2:/share" + command: preview + scp_components: "IDRAC" + scp_file: example_file.xml + job_wait: True + tags: preview-scp-nfs + + - name: Preview SCP with ALL components in XML format from a HTTP share path + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "http://192.168.0.1/http-share" + share_user: 
share_username + share_password: share_password + command: preview + scp_components: "ALL" + scp_file: example_file.xml + job_wait: True + tags: preview-scp-http + + - name: Preview SCP with ALL components in XML format from a local path + dellemc.openmanage.idrac_server_config_profile: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "/scp_folder" + command: preview + scp_components: "IDRAC" + scp_file: example_file.json + job_wait: False + tags: import-scp-local diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_syslog.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_syslog.yml new file mode 100644 index 00000000..9820b6b6 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_syslog.yml @@ -0,0 +1,18 @@ +--- +- hosts: idrac + connection: local + name: Configure iDRAC syslog attributes + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Configure iDRAC syslog attributes + idrac_syslog: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + share_name: "{{ playbook_dir }}" + syslog: "Disabled"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_system_info.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_system_info.yml new file mode 100644 index 00000000..b2f1e1ec --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_system_info.yml @@ -0,0 +1,16 @@ +--- +- hosts: idrac + connection: local + name: Get system inventory + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Get system inventory. + idrac_system_info: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_user.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_user.yml new file mode 100644 index 00000000..ab011e13 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_user.yml @@ -0,0 +1,71 @@ +--- +- hosts: idrac + connection: local + name: Configure the iDRAC users attributes + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Configure the create iDRAC users attributes + idrac_user: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + user_name: "user_name" + user_password: "user_password" + privilege: "Administrator" + ipmi_lan_privilege: "User" + enable: "true" + sol_enable: "true" + protocol_enable: "true" + authentication_protocol: "MD5" + privacy_protocol: "DES" + tags: + - create-user + + - name: Configure the modify iDRAC users attributes + idrac_user: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + action: "present" + user_name: "user_name" + new_user_name: "new_user_name" + privilege: "Administrator" + ipmi_lan_privilege: "User" + enable: 
"true" + sol_enable: "true" + protocol_enable: "true" + authentication_protocol: "MD5" + privacy_protocol: "DES" + tags: + - modify-user + + - name: Configure the modify iDRAC username and password attributes. + idrac_user: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + action: "present" + user_name: "user_name" + new_user_name: "new_user_name" + user_password: "user_password" + tags: + - modify-username + + - name: Configure the delete iDRAC users attributes + idrac_user: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + user_name: "user_name" + tags: + - remove-user diff --git a/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_virtual_media.yml b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_virtual_media.yml new file mode 100644 index 00000000..9a2cc520 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/idrac/idrac_virtual_media.yml @@ -0,0 +1,107 @@ +--- +- hosts: idrac + connection: local + name: Configure the boot order settings + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + + - name: Insert image file to Remote File Share 1 using CIFS share. + idrac_virtual_media: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + virtual_media: + - insert: true + image: "//192.168.0.2/file_path/file.iso" + username: "username" + password: "password" + tags: insert-media-cifs + + - name: Insert image file to Remote File Share 2 using NFS share. 
+ idrac_virtual_media: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + virtual_media: + - index: 2 + insert: true + image: "192.168.0.4:/file_path/file.iso" + tags: insert-media-nfs + + - name: Insert image file to Remote File Share 1 and 2 using HTTP. + idrac_virtual_media: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + force: true + virtual_media: + - index: 1 + insert: true + image: "http://192.168.0.4/file_path/file.img" + - index: 2 + insert: true + image: "http://192.168.0.4/file_path/file.img" + tags: insert-media-http + + - name: Insert image file using HTTPS. + idrac_virtual_media: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + force: true + virtual_media: + - index: 1 + insert: true + image: "https://192.168.0.5/file_path/file.img" + username: username + password: password + tags: insert-media-http + + - name: Eject multiple virtual media. + idrac_virtual_media: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + virtual_media: + - index: 1 + insert: false + - index: 2 + insert: false + tags: eject-media + + - name: Ejection of image file from Remote File Share 1. + idrac_virtual_media: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + force: true + virtual_media: + insert: false + tags: eject-media-rfs1 + + - name: Insertion and ejection of image file in single task. 
+ idrac_virtual_media: + idrac_ip: "{{ idrac_ip }}" + idrac_user: "{{ idrac_user }}" + idrac_password: "{{ idrac_password }}" + ca_path: "/path/to/ca_cert.pem" + force: true + virtual_media: + - index: 1 + insert: true + image: https://192.168.0.5/file/file.iso + username: username + password: password + - index: 2 + insert: false + tags: insert-eject-media diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_smtp.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_smtp.yml new file mode 100644 index 00000000..f77eabdd --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_smtp.yml @@ -0,0 +1,37 @@ +--- +- hosts: ome + connection: local + name: Configure the SMTP settings of OME and OME-M. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Update SMTP destination server configuration with authentication + ome_application_alerts_smtp: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + destination_address: "localhost" + port_number: 25 + use_ssl: true + enable_authentication: true + credentials: + username: "username" + password: "password" + tags: + - smtp_auth + - name: Update SMTP destination server configuration without authentication + ome_application_alerts_smtp: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + destination_address: "localhost" + port_number: 25 + use_ssl: false + enable_authentication: false + tags: + - smtp_no_auth
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_syslog.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_syslog.yml new file mode 100644 index 00000000..9fce647e --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_syslog.yml @@ -0,0 +1,40 @@ +--- +- hosts: ome + connection: local + name: Configure syslog forwarding settings on OpenManage Enterprise and OpenManage Enterprise Modular + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Configure single server to forward syslog + ome_application_alerts_syslog: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + syslog_servers: + - id: 1 + enabled: true + destination_address: 192.168.0.2 + port_number: 514 + + - name: Configure multiple server to forward syslog + ome_application_alerts_syslog: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + syslog_servers: + - id: 1 + port_number: 523 + - id: 2 + enabled: true + destination_address: sysloghost1.lab.com + - id: 3 + enabled: false + - id: 4 + enabled: true + destination_address: 192.168.0.4 + port_number: 514
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_certificate.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_certificate.yml new file mode 100644 index 00000000..ab0fb9eb --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_certificate.yml @@ -0,0 +1,53 @@ +--- +- hosts: ome + connection: local + name: Dell OME Application Certificate Signing Request. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: generate certificate signing request. + ome_application_certificate: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "generate_csr" + distinguished_name: "hostname.com" + department_name: "Remote Access Group" + business_name: "Dell Inc." + locality: "Round Rock" + country_state: "Texas" + country: "US" + email: "support@dell.com" + register: result + tags: + - generate + + - name: copy CSR data into a file. + ansible.builtin.copy: + content: "{{ result.csr_status.CertificateData }}" + dest: "csr_data.txt" + tags: + - csr-data + + - name: upload the certificate. + ome_application_certificate: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "upload" + upload_file: "/path/certificate.cer" + tags: + - upload + + - name: "once certificate uploaded, OME cannot be accessed for few seconds, hence wait for 10 seconds." 
+ wait_for: + host: "{{ hostname }}" + port: "{{ port }}" + delay: 10 + tags: + - upload diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_console_preferences.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_console_preferences.yml new file mode 100644 index 00000000..b0b29ae9 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_console_preferences.yml @@ -0,0 +1,97 @@ +--- +- hosts: ome + connection: local + name: Dell OME Application Console Preferences. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Update Console preferences with all the settings. + ome_application_console_preferences: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + report_row_limit: 123 + device_health: + health_check_interval: 1 + health_check_interval_unit: "Hourly" + health_and_power_state_on_connection_lost: "last_known" + discovery_settings: + general_device_naming: "DNS" + server_device_naming: "IDRAC_HOSTNAME" + invalid_device_hostname: "localhost" + common_mac_addresses: "::" + server_initiated_discovery: + device_discovery_approval_policy: "Automatic" + set_trap_destination: True + mx7000_onboarding_preferences: "all" + builtin_appliance_share: + share_options: "CIFS" + cifs_options: "V1" + email_sender_settings: "admin@dell.com" + trap_forwarding_format: "Original" + metrics_collection_settings: 31 + tags: + - all_settings + + - name: Update Console preferences with report and device health settings. 
+ ome_application_console_preferences: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + report_row_limit: 236 + device_health: + health_check_interval: 10 + health_check_interval_unit: "Hourly" + health_and_power_state_on_connection_lost: "last_known" + tags: + - valid_report_device + + - name: Update Console preferences with invalid device health settings. + ome_application_console_preferences: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_health: + health_check_interval: 65 + health_check_interval_unit: "Minutes" + tags: + - invalid_device + + - name: Update Console preferences with discovery and built in appliance share settings. + ome_application_console_preferences: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + discovery_settings: + general_device_naming: "DNS" + server_device_naming: "IDRAC_SYSTEM_HOSTNAME" + invalid_device_hostname: "localhost" + common_mac_addresses: "00:53:45:00:00:00" + builtin_appliance_share: + share_options: "CIFS" + cifs_options: "V1" + tags: + - valid_discovery + + - name: Update Console preferences with server initiated discovery, mx7000 onboarding preferences, email sender, trap forwarding format, and metrics collection settings. 
+ ome_application_console_preferences: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + server_initiated_discovery: + device_discovery_approval_policy: "Automatic" + set_trap_destination: True + mx7000_onboarding_preferences: "chassis" + email_sender_settings: "admin@dell.com" + trap_forwarding_format: "Normalized" + metrics_collection_settings: 361 + tags: + - valid_metrics diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address.yml new file mode 100644 index 00000000..3eff08bc --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address.yml @@ -0,0 +1,115 @@ +--- +- hosts: ome + connection: local + name: Dell OME Application network settings. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: IPv4 network settings + ome_application_network_address: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + ipv4_configuration: + enable: true + enable_dhcp: false + static_ip_address: 192.168.0.2 + static_subnet_mask: 255.255.254.0 + static_gateway: 192.168.0.3 + use_dhcp_for_dns_server_names: false + static_preferred_dns_server: 192.168.0.4 + static_alternate_dns_server: "" + reboot_delay: 5 + tags: + - ipv4_config + + - name: IPv6 network settings + ome_application_network_address: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + ipv6_configuration: + enable: true + enable_auto_configuration: true + static_ip_address: 2626:f2f2:f081:9:1c1c:f1f1:4747:1 + static_prefix_length: 10 + static_gateway: 2626:f2f2:f081:9:1c1c:f1f1:4747:2 + use_dhcp_for_dns_server_names: true + static_preferred_dns_server: 
2626:f2f2:f081:9:1c1c:f1f1:4747:3 + static_alternate_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:4 + reboot_delay: 10 + tags: + - ipv6_config + + - name: Management vLAN settings for primary interface + ome_application_network_address: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + management_vlan: + enable_vlan: true + vlan_id: 3344 + dns_configuration: + register_with_dns: false + reboot_delay: 1 + tags: + - mgmt_vlan + + - name: DNS settings + ome_application_network_address: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + ipv4_configuration: + enable: true + use_dhcp_for_dns_server_names: false + static_preferred_dns_server: 192.168.0.4 + static_alternate_dns_server: 192.168.0.5 + dns_configuration: + register_with_dns: true + use_dhcp_for_dns_domain_name: false + dns_name: "MX-SVCTAG" + dns_domain_name: "localdomainname" + reboot_delay: 1 + tags: + - dns_config + + - name: Complete network settings + ome_application_network_address: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + ipv4_configuration: + enable: true + enable_dhcp: false + static_ip_address: 192.168.0.2 + static_subnet_mask: 255.255.254.0 + static_gateway: 192.168.0.3 + use_dhcp_for_dns_server_names: false + static_preferred_dns_server: 192.168.0.4 + static_alternate_dns_server: 192.168.0.5 + ipv6_configuration: + enable: true + enable_auto_configuration: true + static_ip_address: 2626:f2f2:f081:9:1c1c:f1f1:4747:1 + static_prefix_length: 10 + static_gateway: 2626:f2f2:f081:9:1c1c:f1f1:4747:2 + use_dhcp_for_dns_server_names: true + static_preferred_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:3 + static_alternate_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:4 + dns_configuration: + register_with_dns: true + use_dhcp_for_dns_domain_name: false + dns_name: "MX-SVCTAG" + dns_domain_name: 
"localdomainname" + reboot_delay: 1 + tags: + - all_network_config
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address_with_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address_with_job_tracking.yml new file mode 100644 index 00000000..1f4cf709 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address_with_job_tracking.yml @@ -0,0 +1,65 @@ +--- +- hosts: ome + vars: + retries_count: 50 + polling_interval: 5 # in seconds + connection: local + name: OME - Complete network settings with details tracking + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Complete network settings + ome_application_network_address: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + ipv4_configuration: + enable: true + enable_dhcp: false + static_ip_address: 192.168.0.2 + static_subnet_mask: 255.255.254.0 + static_gateway: 192.168.0.3 + use_dhcp_for_dns_server_names: false + static_preferred_dns_server: 192.168.0.4 + static_alternate_dns_server: 192.168.0.5 + ipv6_configuration: + enable: true + enable_auto_configuration: true + static_ip_address: 2626:f2f2:f081:9:1c1c:f1f1:4747:1 + static_prefix_length: 10 + static_gateway: 2626:f2f2:f081:9:1c1c:f1f1:4747:2 + use_dhcp_for_dns_server_names: true + static_preferred_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:3 + static_alternate_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:4 + dns_configuration: + register_with_dns: true + use_dhcp_for_dns_domain_name: false + dns_name: "MX-SVCTAG" + dns_domain_name: "localdomainname" + reboot_delay: 1 + register: facts_result + + # To end play when no job_info + - name: "End the play when no job_info" + meta: end_play + when: + - facts_result.changed == false + - "'job_info' not in facts_result" + + - name: "Get job details using job id from network address config task." 
+ ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: "{{ facts_result.job_info.Id }}" + register: job_result + failed_when: job_result.job_info.LastRunStatus.Name == 'Failed' + changed_when: job_result.job_info.LastRunStatus.Name == 'Completed' + until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_proxy.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_proxy.yml new file mode 100644 index 00000000..0c0e8abf --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_proxy.yml @@ -0,0 +1,44 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible Application network proxy setting. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Update proxy configuration and enable authentication. + ome_application_network_proxy: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + enable_proxy: true + ip_address: "192.168.0.2" + proxy_port: 444 + enable_authentication: true + proxy_username: "root" + proxy_password: "proxy_password" + tags: setting1 + + - name: Reset proxy authentication. + ome_application_network_proxy: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + enable_proxy: true + ip_address: "192.168.0.2" + proxy_port: 444 + enable_authentication: false + tags: setting2 + + - name: Reset proxy configuration. 
+ ome_application_network_proxy: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + enable_proxy: false + tags: setting3 diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_settings.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_settings.yml new file mode 100644 index 00000000..68340ba9 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_settings.yml @@ -0,0 +1,73 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible Application network setting. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Configure universal inactivity timeout + ome_application_network_settings: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + session_inactivity_timeout: + enable_universal_timeout: true + universal_timeout: 30 + api_sessions: 90 + gui_sessions: 5 + ssh_sessions: 2 + serial_sessions: 1 + tags: + - enable_universal_timeout + - name: Configure API and GUI timeout and sessions + ome_application_network_settings: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + session_inactivity_timeout: + api_timeout: 20 + api_sessions: 100 + gui_timeout: 25 + gui_sessions: 5 + tags: + - enable_api_gui_timout_sessions + - name: Configure timeout and sessions for all parameters + ome_application_network_settings: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + session_inactivity_timeout: + api_timeout: 20 + api_sessions: 100 + gui_timeout: 15 + gui_sessions: 5 + ssh_timeout: 30 + ssh_sessions: 2 + serial_timeout: 35 + serial_sessions: 1 + tags: + - enable_all_timeout_sessions + - name: Disable universal timeout and configure 
timeout and sessions for other parameters + ome_application_network_settings: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + session_inactivity_timeout: + enable_universal_timeout: false + api_timeout: 20 + api_sessions: 100 + gui_timeout: 15 + gui_sessions: 5 + ssh_timeout: 30 + ssh_sessions: 2 + serial_timeout: 35 + serial_sessions: 1 + tags: + - disa_all_timeout_sessions
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time.yml new file mode 100644 index 00000000..7dd4edad --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time.yml @@ -0,0 +1,33 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible Application network time setting. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Configure system time. + ome_application_network_time: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + enable_ntp: false + system_time: "2020-03-31 21:35:18" + time_zone: "TZ_ID_11" + tags: time_setting1 + + - name: Configure NTP server for time synchronization. + ome_application_network_time: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + time_zone: "TZ_ID_66" + enable_ntp: true + primary_ntp_address: "192.168.0.2" + secondary_ntp_address1: "192.168.0.3" + secondary_ntp_address2: "192.168.0.4" + tags: time_setting2 diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time_zone_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time_zone_info.yml new file mode 100644 index 00000000..a57e0b90 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time_zone_info.yml @@ -0,0 +1,31 @@ +--- +- hosts: ome + connection: local + gather_facts: false + name: "Ome application network time zone informaion - Ansible Module" + vars: + time_zone_uri: "/api/ApplicationService/Network/TimeZones" + + collections: + - dellemc.openmanage + + tasks: + - name: "Get list of all available times zones along 
with information specific to each time zone." + uri: + url: "https://{{ baseuri }}{{ time_zone_uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200 + validate_certs: no + force_basic_auth: yes + register: time_zone_result + failed_when: "'value' not in time_zone_result.json" + + - name: Get specific time zone ID using time zone name + with_items: + - "{{ time_zone_result.json.value }}" + debug: + msg: "{{item['Id']}}" + when: item['Name']=='(GMT+05:30) Sri Jayawardenepura' diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver.yml new file mode 100644 index 00000000..e445ed84 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver.yml @@ -0,0 +1,40 @@ +--- +- hosts: ome + connection: local + name: Dell OME Application network webserver settings. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Update webserver port and session time out configuration. + ome_application_network_webserver: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + webserver_port: 443 + webserver_timeout: 10 + tags: + - port_timeout_update + + - name: Update session time out + ome_application_network_webserver: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + webserver_timeout: 30 + tags: + - timeout_update + + - name: Update web server port. + ome_application_network_webserver: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + webserver_port: 8443 + tags: + - port_update
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver_port_changed_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver_port_changed_tracking.yml new file mode 100644 index 00000000..28911b80 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver_port_changed_tracking.yml @@ -0,0 +1,61 @@ +--- +- hosts: ome + connection: local + name: "Dell OME Application network webserver port change and track web + server till the service restarts." + gather_facts: False + vars: + # 5 minutes wait max + retries_count: 30 + polling_interval: 10 + webserver_uri: "/api/ApplicationService/Network/WebServerConfiguration" + + collections: + - dellemc.openmanage + + tasks: + # Update web server configuration + - name: Update webserver port and timeout of OME + ome_application_network_webserver: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + port: "{{ ome_webserver_port }}" + webserver_port: "{{ new_port }}" + webserver_timeout: 21 + register: result + + # To end play when no port change or failure + - name: "End the play when no port change" + meta: end_play + when: + - result.changed == false + - "'webserver_configuration' not in result" + + # Loop till OME webserver is active by using the new port and webserver config GET call + - name: "Pause play until webserver URL is reachable from this host with new port" + uri: + url: "https://{{ hostname }}:{{ result.webserver_configuration.PortNumber + }}{{ webserver_uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: webport_result + until: "'PortNumber' in webport_result 
or webport_result.status == 200" + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" + + # Output the webserver_configuration values to be used further + - name: "Output the webserver config" + vars: + webserver_configuration: "{{ webport_result.json }}" + debug: + var: webserver_configuration
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_security_settings.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_security_settings.yml new file mode 100644 index 00000000..6a259e96 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_security_settings.yml @@ -0,0 +1,57 @@ +--- +- hosts: ome + connection: local + name: Configure login security settings + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Configure restricted allowed IP range + ome_application_security_settings: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + restrict_allowed_ip_range: + enable_ip_range: true + ip_range: 192.1.2.3/24 + + - name: Configure login lockout policy + ome_application_security_settings: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + login_lockout_policy: + by_user_name: true + by_ip_address: true + lockout_fail_count: 3 + lockout_fail_window: 30 + lockout_penalty_time: 900 + + - name: Configure restricted allowed IP range and login lockout policy with job wait time out of 60 seconds + ome_application_security_settings: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + restrict_allowed_ip_range: + enable_ip_range: true + ip_range: 192.1.2.3/24 + login_lockout_policy: + by_user_name: true + by_ip_address: true + lockout_fail_count: 3 + lockout_fail_window: 30 + lockout_penalty_time: 900 + job_wait_timeout: 60 + + - name: Enable FIPS mode + ome_application_security_settings: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + fips_mode_enable: yes diff --git 
a/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline.yml new file mode 100644 index 00000000..1d5f2375 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline.yml @@ -0,0 +1,119 @@ +--- +- hosts: ome + connection: local + name: Dell EMC OpenManage Ansible configuration compliance baseline. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Create a configuration compliance baseline using device IDs + ome_configuration_compliance_baseline: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: create + template_name: "template 1" + description: "description of baseline" + names: "baseline1" + device_ids: + - 1111 + - 2222 + tags: + - create_compliance_baseline_device_id + + - name: Create a configuration compliance baseline using device service tags + ome_configuration_compliance_baseline: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: create + template_name: "template 1" + names: "baseline1" + description: "description of baseline" + device_service_tags: + - "SVCTAG1" + - "SVCTAG2" + tags: + - create_compliance_baseline_tags + + - name: Create a configuration compliance baseline using group names + ome_configuration_compliance_baseline: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: create + template_name: "template 1" + job_wait_timeout: 1000 + names: "baseline1" + description: "description of baseline" + device_group_names: + - "Group1" + - "Group2" + tags: + - create_compliance_baseline_group_id + + - name: Delete the configuration compliance baselines + 
ome_configuration_compliance_baseline: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: delete + names: + - baseline1 + - baseline2 + tags: + - delete_compliance_baseline + + - name: Modify a configuration compliance baseline using group names + ome_configuration_compliance_baseline: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + names: "baseline1" + new_name: "baseline_update" + template_name: "template2" + description: "new description of baseline" + job_wait_timeout: 1000 + device_group_names: + - Group1 + + - name: Remediate specific non-compliant devices to a configuration compliance baseline using device IDs + ome_configuration_compliance_baseline: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + command: "remediate" + names: "baseline1" + device_ids: + - 1111 + + - name: Remediate specific non-compliant devices to a configuration compliance baseline using device service tags + ome_configuration_compliance_baseline: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + command: "remediate" + names: "baseline1" + job_wait_timeout: 2000 + device_service_tags: + - "SVCTAG1" + - "SVCTAG2" + + - name: Remediate all the non-compliant devices to a configuration compliance baseline + ome_configuration_compliance_baseline: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + command: "remediate" + job_wait_timeout: 2000 + names: "baseline1"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml new file mode 100644 index 00000000..076ce84d --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml @@ -0,0 +1,52 @@ +--- +- hosts: ome + connection: local + name: Dell EMC OpenManage Ansible configuration compliance baseline workflow. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + + - name: Create a configuration compliance baseline using group names + ome_configuration_compliance_baseline: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: create + template_name: "template 1" + job_wait_timeout: 1000 + names: "baseline1" + description: "description of baseline" + device_group_names: + - "Group1" + - "Group2" + + - name: Retrieve the compliance report of all of the devices in the specified configuration compliance baseline. + ome_configuration_compliance_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline: "baseline1" + register: compliance_report + + # This tasks returns list of device ids. + # In case if you want to get devices based on service tag change attribute ServiceTag + # and next task device_ids attribute replaced with device_service_tag. + - name: Filter the non compliant device based on the retrieved compliance report. 
+ ansible.builtin.set_fact: + non_compliance_devices: "{{ compliance_report.compliance_info | json_query(\"value[?ComplianceStatus=='NONCOMPLIANT']\") | map(attribute='Id') | list }}" + + - name: Remediate a specified non-complaint devices to a configuration compliance baseline using device IDs + ome_configuration_compliance_baseline: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "remediate" + names: "baseline1" + device_ids: "{{ non_compliance_devices }}" + when: "non_compliance_devices|length>0" diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_info.yml new file mode 100644 index 00000000..a2455703 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_info.yml @@ -0,0 +1,35 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible Module for Device compliance information + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Retrieve the compliance report of all of the devices in the specified configuration compliance baseline. + ome_configuration_compliance_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline: baseline_name + + - name: Retrieve the compliance report for a specific device associated with the baseline using the device ID. + ome_configuration_compliance_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline: baseline_name + device_id: 10001 + + - name: Retrieve the compliance report for a specific device associated with the baseline using the device service tag. 
+ ome_configuration_compliance_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline: baseline_name + device_service_tag: 2HFGH3 diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_baseline.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_baseline.yml new file mode 100644 index 00000000..48259af6 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_baseline.yml @@ -0,0 +1,26 @@ +--- +- hosts: ome + connection: local + gather_facts: false + name: "OME - Ansible Modules" + + collections: + - dellemc.openmanage + + tasks: + + - name: "Retrieve baseline information for specific baseline." + ome_firmware_baseline_compliance_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: "baseline_name" + register: result + + - name: "Filter out device compliance reports." + loop: "{{ result.baseline_compliance_info }}" + debug: + msg: "{{item.ComponentComplianceReports}}" + loop_control: + label: "{{ item.DeviceId }}"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_devices.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_devices.yml new file mode 100644 index 00000000..77d4eddf --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_devices.yml @@ -0,0 +1,28 @@ +--- +- hosts: ome + connection: local + gather_facts: false + name: "OME - Ansible Modules" + + collections: + - dellemc.openmanage + + tasks: + + - name: "Retrieve baseline information for specified devices." + ome_firmware_baseline_compliance_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_ids: + - 11111 + - 22222 + register: result + + - name: "Filter out device compliance reports." + debug: + msg: "{{ item.DeviceComplianceReports.0.ComponentComplianceReports }}" + loop: "{{ result.baseline_compliance_info }}" + loop_control: + label: "{{ item.Name }}"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline.yml new file mode 100644 index 00000000..35f0eb23 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline.yml @@ -0,0 +1,75 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible OME firmware baseline operations. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Create baseline for device IDs + ome_firmware_baseline: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: "baseline_name" + baseline_description: "baseline_description" + catalog_name: "catalog_name" + device_ids: + - 1010 + - 2020 + + - name: Create baseline for servicetags + ome_firmware_baseline: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: "baseline_name" + baseline_description: "baseline_description" + catalog_name: "catalog_name" + device_service_tags: + - "SVCTAG1" + - "SVCTAG2" + + - name: create baseline for device groups without job_tracking + ome_firmware_baseline: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: "baseline_name" + baseline_description: "baseline_description" + catalog_name: "catalog_name" + device_group_names: + - "Group1" + - "Group2" + job_wait: no + + - name: Modify an existing baseline + ome_firmware_baseline: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: "existing_baseline_name" + new_baseline_name: "new_baseline_name" + baseline_description: "new baseline_description" + catalog_name: "catalog_other" + 
device_group_names: + - "Group3" + - "Group4" + - "Group5" + downgrade_enabled: no + is_64_bit: yes + + - name: Delete a baseline + ome_firmware_baseline: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: absent + baseline_name: "baseline_name"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info.yml new file mode 100644 index 00000000..cb42e174 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info.yml @@ -0,0 +1,51 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible template inventory details. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Retrieves device based compliance report for specified device IDs.. + ome_firmware_baseline_compliance_info: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + device_ids: + - 11111 + - 22222 + tags: device_ids + + - name: Retrieves device based compliance report for specified service Tags. + ome_firmware_baseline_compliance_info: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + device_service_tags: + - MXL1234 + - MXL4567 + tags: device_service_tags + + - name: Retrieves device based compliance report for specified group names. + ome_firmware_baseline_compliance_info: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + device_group_names: + - group1 + - group2 + tags: device_group_names + + - name: Retrieves device compliance report for a specified baseline. + ome_firmware_baseline_compliance_info: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: "baseline_name" + tags: baseline_device
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info_filters.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info_filters.yml new file mode 100644 index 00000000..bbbf5f0d --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info_filters.yml @@ -0,0 +1,63 @@ +--- +- hosts: ome + connection: local + gather_facts: false + name: "OME - Ansible Modules" + + collections: + - dellemc.openmanage + + tasks: + + - name: "Retrieve baseline information for specific device ids." + ome_firmware_baseline_compliance_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_ids: + - 11111 + - 11112 + register: result + + tags: + - overall-compliance-report + + - name: "Firmware baseline compliance info based on FirmwareStatus - Non-Compliant" + set_fact: + non_compliance_fact: "{{ item }}" + when: + - item.DeviceComplianceReports.0.FirmwareStatus=='Non-Compliant' + with_items: + - "{{ result.baseline_compliance_info }}" + loop_control: + label: "{{ item.Name }} - {{ item.DeviceComplianceReports.0.FirmwareStatus }}" + + tags: + - non-compliance-report + + - name: "Firmware baseline compliance info based on Device ID" + set_fact: + device_fact: "{{ item }}" + when: + - item.DeviceComplianceReports.0.DeviceId==11111 + with_items: + - "{{ result.baseline_compliance_info }}" + loop_control: + label: "{{ item.Name }} - {{ item.DeviceComplianceReports.0.DeviceId }}" + + tags: + - device-id-report + + - name: "Firmware baseline compliance info based on Device Service Tag" + set_fact: + service_tag_fact: "{{ item }}" + when: + - item.DeviceComplianceReports.0.ServiceTag=='1X1X1' + with_items: + - "{{ result.baseline_compliance_info }}" + loop_control: + label: "{{ item.Name }} - {{ 
item.DeviceComplianceReports.0.ServiceTag }}" + + tags: + - device-service-tag-report
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_info.yml new file mode 100644 index 00000000..7993db51 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_info.yml @@ -0,0 +1,26 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible firmware baseline details. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Retrieve details of all the available firmware baselines. + ome_firmware_baseline_info: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + tags: firmware_baselines + + - name: Retrieve details of a specific firmware baseline identified by its baseline name. + ome_firmware_baseline_info: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: "baseline_name" + tags: firmware_baseline
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/catalog/ome_firmware_catalog.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/catalog/ome_firmware_catalog.yml new file mode 100644 index 00000000..a065a3c0 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/catalog/ome_firmware_catalog.yml @@ -0,0 +1,121 @@ +--- +- hosts: ome + connection: local + name: "OME - Create Catalog using Repository." + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Create a catalog from HTTPS repository + ome_firmware_catalog: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + catalog_name: catalog1 + catalog_description: catalog description + source: downloads.company.com + repository_type: HTTPS + source_path: "catalog" + file_name: "catalog.gz" + check_certificate: True + + - name: Create a catalog from HTTP repository + ome_firmware_catalog: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + catalog_name: "{{ catalog_name }}" + catalog_description: catalog description + source: downloads.company.com + repository_type: HTTP + source_path: "catalog" + file_name: "catalog.gz" + + - name: Create a catalog using CIFS share + ome_firmware_catalog: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + catalog_name: "{{ catalog_name }}" + catalog_description: catalog description + source: "192.166.0.1" + repository_type: CIFS + source_path: "cifs/R940" + file_name: "catalog.gz" + repository_username: "{{ repository_username }}" + repository_password: "{{ repository_password }}" + repository_domain: "{{ repository_domain }}" + + - name: Create a catalog using NFS share + ome_firmware_catalog: + hostname: "{{ hostname }}" + username: "{{ username }}" 
+ password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + catalog_name: "{{ catalog_name }}" + catalog_description: catalog description + source: "192.166.0.2" + repository_type: NFS + source_path: "/nfs/R940" + file_name: "catalog.xml" + + - name: Create a catalog using repository from Dell.com + ome_firmware_catalog: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + catalog_name: "catalog_name" + catalog_description: "catalog_description" + repository_type: "DELL_ONLINE" + check_certificate: True + + - name: Modify a catalog using a repository from CIFS share + ome_firmware_catalog: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + catalog_name: "{{ catalog_name }}" + catalog_description: new catalog description + source: "192.166.0.2" + repository_type: CIFS + source_path: "cifs/R941" + file_name: "catalog1.gz" + repository_username: "{{ repository_username }}" + repository_password: "{{ repository_password }}" + repository_domain: "{{ repository_domain }}" + + - name: Modify a catalog using a repository from Dell.com + ome_firmware_catalog: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + catalog_id: 10 + repository_type: DELL_ONLINE + new_catalog_name: "new_catalog_name" + catalog_description: "new_catalog_description" + + - name: Delete catalog using catalog name + ome_firmware_catalog: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: absent + catalog_name: ["catalog_name1", "catalog_name2"] + + - name: Delete catalog using catalog id + ome_firmware_catalog: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: absent + catalog_id: [11, 34]
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware.yml new file mode 100644 index 00000000..198e2cce --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware.yml @@ -0,0 +1,142 @@ +--- +- hosts: ome + connection: local + name: "OME - Update Firmware" + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Update firmware from a DUP file using a device ids + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_id: + - 11111 + - 22222 + dup_file: "/path/Chassis-System-Management_Firmware_6N9WN_WN64_1.00.01_A00.EXE" + + - name: Update firmware from a DUP file using a device service tags + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: + - KLBR111 + - KLBR222 + dup_file: "/path/Network_Firmware_NTRW0_WN64_14.07.07_A00-00_01.EXE" + + - name: Update firmware from a DUP file using a device group names + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_group_names: + - servers + dup_file: "/path/BIOS_87V69_WN64_2.4.7.EXE" + + - name: Update firmware using baseline name + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: baseline_devices + + - name: Stage firmware for the next reboot using baseline name + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: baseline_devices + schedule: StageForNextReboot + + - name: Update firmware using baseline name and components + 
ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: baseline_devices + components: + - BIOS + + - name: Update firmware of device components from a DUP file using a device ids in a baseline + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: baseline_devices + device_id: + - 11111 + - 22222 + components: + - iDRAC with Lifecycle Controller + + - name: Update firmware of device components from a baseline using a device service tags under a baseline + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: baseline_devices + device_service_tag: + - KLBR111 + - KLBR222 + components: + - IOM-SAS + + - name: Update firmware using baseline name with a device id and required components + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: baseline_devices + devices: + - id: 12345 + components: + - Lifecycle Controller + - id: 12346 + components: + - Enterprise UEFI Diagnostics + - BIOS + + - name: Update firmware using baseline name with a device service tag and required components + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: baseline_devices + devices: + - service_tag: ABCDE12 + components: + - PERC H740P Adapter + - BIOS + - service_tag: GHIJK34 + components: + - OS Drivers Pack + + - name: Update firmware using baseline name with a device service tag or device id and required components + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: baseline_devices + devices: + - 
service_tag: ABCDE12 + components: + - BOSS-S1 Adapter + - PowerEdge Server BIOS + - id: 12345 + components: + - iDRAC with Lifecycle Controller diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware_with_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware_with_job_tracking.yml new file mode 100644 index 00000000..c104f3f5 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware_with_job_tracking.yml @@ -0,0 +1,111 @@ +--- +- hosts: ome + connection: local + name: "OME - Update Firmware" + gather_facts: False + vars: + retries_count: 100 + polling_interval: 10 + all_firmware_task_tags: + - device-ids + - service-tags + - group-name + - baseline-name + - baseline-name-dup + + collections: + - dellemc.openmanage + + tasks: + - name: "Update firmware from a DUP file using a device ids." + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_id: + - 11111 + - 22222 + dup_file: "/path/Chassis-System-Management_Firmware_6N9WN_WN64_1.00.01_A00.EXE" + register: result + tags: + - device-ids + + - name: "Update firmware from a DUP file using a device service tags." + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: + - KLBR111 + - KLBR222 + dup_file: "/path/Network_Firmware_NTRW0_WN64_14.07.07_A00-00_01.EXE" + register: result + tags: + - service-tags + + - name: "Update firmware from a DUP file using a device group names." + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_group_names: + - servers + dup_file: "/path/BIOS_87V69_WN64_2.4.7.EXE" + register: result + tags: + - group-name + + - name: "Update firmware using baseline name." 
+ ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: baseline_devices + register: result + tags: + - baseline-name + + - name: "Update firmware from a DUP file using a baseline names." + ome_firmware: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + baseline_name: "baseline_devices, baseline_groups" + dup_file: "/path/BIOS_87V69_WN64_2.4.7.EXE" + tags: + - baseline-name-dup + + - name: "Track job details for the ome firmware update operation using a job id." + ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: "{{ result.update_status.Id }}" + register: job_result + until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed' or job_result.job_info.LastRunStatus.Name == 'Warning' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" + tags: "{{ all_firmware_task_tags }}" + + - name: "Set job fact details if the task status is warning." + set_fact: + ome_firmware_job_fact: "{{ job_result | combine(job_msg, recursive=true) }}" + failed_when: job_result.job_info.LastRunStatus.Name == 'Warning' + vars: + job_msg: {'msg': 'Completed with {{ job_result.job_info.LastRunStatus.Name|lower}}'} + when: job_result.job_info.LastRunStatus.Name == 'Warning' + tags: "{{ all_firmware_task_tags }}" + + - name: "Set job fact details if the task status is completed or failed." + set_fact: + ome_firmware_job_fact: "{{ job_result }}" + failed_when: job_result.job_info.LastRunStatus.Name == 'Failed' + changed_when: job_result.job_info.LastRunStatus.Name == 'Completed' + when: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed' + tags: "{{ all_firmware_task_tags }}"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_active_directory.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_active_directory.yml new file mode 100644 index 00000000..16011809 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_active_directory.yml @@ -0,0 +1,72 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible Active Directory service configuration. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Add Active Directory service using DNS lookup along with the test connection + ome_active_directory: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + name: my_ad1 + domain_server: + - domainname.com + group_domain: domainname.com + test_connection: yes + domain_username: user@domainname + domain_password: domain_password + + - name: Add Active Directory service using IP address of the domain controller with certificate validation + ome_active_directory: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + name: my_ad2 + domain_controller_lookup: MANUAL + domain_server: + - 192.68.20.181 + group_domain: domainname.com + validate_certificate: yes + certificate_file: "/path/to/certificate/file.cer" + + - name: Modify domain controller IP address, network_timeout and group_domain + ome_active_directory: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + name: my_ad2 + domain_controller_lookup: MANUAL + domain_server: + - 192.68.20.189 + group_domain: newdomain.in + network_timeout: 150 + + - name: Delete Active Directory service + ome_active_directory: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + name: my_ad2 + state: absent + + - name: 
Test connection to existing Active Directory service with certificate validation + ome_active_directory: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + name: my_ad2 + test_connection: yes + domain_username: user@domainname + domain_password: domain_password + validate_certificate: yes + certificate_file: "/path/to/certificate/file.cer" diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_chassis_slots.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_chassis_slots.yml new file mode 100644 index 00000000..0099fc80 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_chassis_slots.yml @@ -0,0 +1,65 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible slot name configuration. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Rename the slots in multiple chassis using slot number and chassis service tag. + ome_chassis_slots: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + slot_options: + - chassis_service_tag: ABC1234 + slots: + - slot_number: 1 + slot_name: sled_name_1 + - slot_number: 2 + slot_name: sled_name_2 + - chassis_service_tag: ABC1235 + slots: + - slot_number: 1 + slot_name: sled_name_1 + - slot_number: 2 + slot_name: sled_name_2 + + - name: Rename single slot name of the sled using sled ID + ome_chassis_slots: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_options: + - device_id: 10054 + slot_name: slot_device_name_1 + + - name: Rename single slot name of the sled using sled service tag + ome_chassis_slots: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_options: + - device_service_tag: ABC1234 + slot_name: service_tag_slot + + - name: 
Rename multiple slot names of the devices + ome_chassis_slots: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_options: + - device_id: 10054 + slot_name: sled_name_1 + - device_service_tag: ABC1234 + slot_name: sled_name_2 + - device_id: 10055 + slot_name: sled_name_3 + - device_service_tag: PQR1234 + slot_name: sled_name_4 diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_group.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_group.yml new file mode 100644 index 00000000..d7af342a --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_group.yml @@ -0,0 +1,167 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible device inventory details. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Add devices to a static device group by using the group name and device IDs + ome_device_group: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + name: "Storage Services" + device_ids: + - 11111 + - 11112 + tags: device-id + + - name: Add devices to a static device group by using the group name and device service tags + ome_device_group: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + name: "Storage Services" + device_service_tags: + - GHRT2R + - KJHDF3 + tags: device-service-tags + + - name: Add devices to a static device group by using the group ID and device service tags + ome_device_group: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + group_id: 12345 + device_service_tags: + - GHRT2R + - KJHDF3 + tags: group_id_device-service-tags + + - name: Add devices to a static device group by using the group name and IPv4 addresses + ome_device_group: + 
hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + name: "Storage Services" + ip_addresses: + - 192.35.0.1 + - 192.35.0.5 + tags: group_name_ipv4 + + - name: Add devices to a static device group by using the group ID and IPv6 addresses + ome_device_group: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + group_id: 12345 + ip_addresses: + - fe80::ffff:ffff:ffff:ffff + - fe80::ffff:ffff:ffff:2222 + tags: group_id_ipv6 + + - name: Add devices to a static device group by using the group ID and supported IPv4 and IPv6 address formats. + ome_device_group: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + group_id: 12345 + ip_addresses: + - 192.35.0.1 + - 10.36.0.0-192.36.0.255 + - 192.37.0.0/24 + - fe80::ffff:ffff:ffff:ffff + - ::ffff:192.0.2.0/125 + - fe80::ffff:ffff:ffff:1111-fe80::ffff:ffff:ffff:ffff + tags: group_id_ipv4_ipv6 + + - name: Remove devices from a static device group by using the group name and device IDs + ome_device_group: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + name: "Storage Services" + device_ids: + - 11111 + - 11112 + tags: device-id + + - name: Remove devices from a static device group by using the group name and device service tags + ome_device_group: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + name: "Storage Services" + device_service_tags: + - GHRT2R + - KJHDF3 + tags: device-service-tags + + - name: Remove devices from a static device group by using the group ID and device service tags + ome_device_group: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: 
"absent" + group_id: 12345 + device_service_tags: + - GHRT2R + - KJHDF3 + tags: group_id_device-service-tags + + - name: Remove devices from a static device group by using the group name and IPv4 addresses + ome_device_group: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + name: "Storage Services" + ip_addresses: + - 192.35.0.1 + - 192.35.0.5 + tags: group_name_ipv4 + + - name: Remove devices from a static device group by using the group ID and IPv6 addresses + ome_device_group: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + group_id: 12345 + ip_addresses: + - fe80::ffff:ffff:ffff:ffff + - fe80::ffff:ffff:ffff:2222 + tags: group_id_ipv6 + + - name: Remove devices from a static device group by using the group ID and supported IPv4 and IPv6 address formats. + ome_device_group: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + group_id: 12345 + ip_addresses: + - 192.35.0.1 + - 10.36.0.0-192.36.0.255 + - 192.37.0.0/24 + - fe80::ffff:ffff:ffff:ffff + - ::ffff:192.0.2.0/125 + - fe80::ffff:ffff:ffff:1111-fe80::ffff:ffff:ffff:ffff + tags: group_id_ipv4_ipv6 diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_info.yml new file mode 100644 index 00000000..6b307749 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_info.yml @@ -0,0 +1,79 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible device inventory details. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Retrieve basic inventory of all devices. 
+ ome_device_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + + - name: Retrieve basic inventory for devices identified by IDs 33333 or 11111 using filtering. + ome_device_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + fact_subset: "basic_inventory" + system_query_options: + filter: "Id eq 33333 or Id eq 11111" + + - name: Retrieve inventory details of specified devices identified by IDs 11111 and 22222. + ome_device_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + fact_subset: "detailed_inventory" + system_query_options: + device_id: + - 11111 + - 22222 + + - name: Retrieve inventory details of specified devices identified by service tags MXL1234 and MXL4567. + ome_device_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + fact_subset: "detailed_inventory" + system_query_options: + device_service_tag: + - MXL1234 + - MXL4567 + + - name: Retrieve details of specified inventory type of specified devices identified by ID and service tags. + ome_device_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + fact_subset: "detailed_inventory" + system_query_options: + device_id: + - 11111 + device_service_tag: + - MXL1234 + - MXL4567 + inventory_type: "serverDeviceCards" + + - name: Retrieve subsystem health of specified devices identified by service tags. 
+ ome_device_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + fact_subset: "subsystem_health" + system_query_options: + device_service_tag: + - MXL1234 + - MXL4567 + + diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_local_access_configuration.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_local_access_configuration.yml new file mode 100644 index 00000000..6f282c8a --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_local_access_configuration.yml @@ -0,0 +1,68 @@ +--- +- hosts: ome + connection: local + name: OpenManage Ansible Modules for local access settings. + gather_facts: false + collections: dellemc.openmanage + + tasks: + + - name: Configure KVM, direct access and power button settings of the chassis using device ID. + ome_device_local_access_configuration: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_id: 25011 + enable_kvm_access: true + enable_chassis_direct_access: false + chassis_power_button: + enable_chassis_power_button: false + enable_lcd_override_pin: true + disabled_button_lcd_override_pin: 123456 + tags: lac-device-id + + - name: Configure Quick sync and LCD settings of the chassis using device service tag. + ome_device_local_access_configuration: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: GHRT2RL + quick_sync: + quick_sync_access: READ_ONLY + enable_read_authentication: true + enable_quick_sync_wifi: true + enable_inactivity_timeout: true + timeout_limit: 10 + timeout_limit_unit: MINUTES + lcd: + lcd_access: VIEW_ONLY + lcd_language: en + user_defined: "LCD Text" + tags: lac-tag + + - name: Configure all local access settings of the host chassis. 
+ ome_device_local_access_configuration: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + enable_kvm_access: true + enable_chassis_direct_access: false + chassis_power_button: + enable_chassis_power_button: false + enable_lcd_override_pin: true + disabled_button_lcd_override_pin: 123456 + quick_sync: + quick_sync_access: READ_WRITE + enable_read_authentication: true + enable_quick_sync_wifi: true + enable_inactivity_timeout: true + timeout_limit: 120 + timeout_limit_unit: SECONDS + lcd: + lcd_access: VIEW_MODIFY + lcd_language: en + user_defined: "LCD Text" + tags: lac-host diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_location.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_location.yml new file mode 100644 index 00000000..d2d86050 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_location.yml @@ -0,0 +1,52 @@ +--- +- hosts: ome + connection: local + name: OpenManage Ansible Modules + gather_facts: false + collections: dellemc.openmanage + + tasks: + + - name: Update device location settings of a chassis using the device ID. + ome_device_location: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_id: 25011 + data_center: data center 1 + room: room 1 + aisle: aisle 1 + rack: rack 1 + rack_slot: 2 + location: location 1 + tags: location-device-id + + - name: Update device location settings of a chassis using the device service tag. + ome_device_location: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: GHRT2RL + data_center: data center 1 + room: room 1 + aisle: aisle 1 + rack: rack 1 + rack_slot: 2 + location: location 1 + tags: location-device-service-tag + + - name: Update device location settings of the host chassis. 
+ ome_device_location: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + data_center: data center 1 + room: room 1 + aisle: aisle 1 + rack: rack 1 + rack_slot: 2 + location: location 1 + tags: location-chassis diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_mgmt_network.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_mgmt_network.yml new file mode 100644 index 00000000..e05a3772 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_mgmt_network.yml @@ -0,0 +1,105 @@ +--- +- hosts: ome + connection: local + name: Dell OME Modular device network settings. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Network settings for chassis + ome_device_mgmt_network: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: CHAS123 + delay: 10 + ipv4_configuration: + enable_ipv4: true + enable_dhcp: false + static_ip_address: 192.168.0.2 + static_subnet_mask: 255.255.254.0 + static_gateway: 192.168.0.3 + use_dhcp_to_obtain_dns_server_address: false + static_preferred_dns_server: 192.168.0.4 + static_alternate_dns_server: 192.168.0.5 + ipv6_configuration: + enable_ipv6: true + enable_auto_configuration: false + static_ip_address: 2626:f2f2:f081:9:1c1c:f1f1:4747:1 + static_prefix_length: 10 + static_gateway: ffff::2607:f2b1:f081:9 + use_dhcpv6_to_obtain_dns_server_address: false + static_preferred_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:3 + static_alternate_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:4 + dns_configuration: + register_with_dns: true + use_dhcp_for_dns_domain_name: false + dns_name: MX-SVCTAG + dns_domain_name: dnslocaldomain + auto_negotiation: no + network_speed: 100_MB + + - name: Network settings for server + ome_device_mgmt_network: + hostname: "{{hostname}}" + username: "{{username}}" + 
password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: SRVR123 + ipv4_configuration: + enable_ipv4: true + enable_dhcp: false + static_ip_address: 192.168.0.2 + static_subnet_mask: 255.255.254.0 + static_gateway: 192.168.0.3 + use_dhcp_to_obtain_dns_server_address: false + static_preferred_dns_server: 192.168.0.4 + static_alternate_dns_server: 192.168.0.5 + ipv6_configuration: + enable_ipv6: true + enable_auto_configuration: false + static_ip_address: 2626:f2f2:f081:9:1c1c:f1f1:4747:1 + static_prefix_length: 10 + static_gateway: ffff::2607:f2b1:f081:9 + use_dhcpv6_to_obtain_dns_server_address: false + static_preferred_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:3 + static_alternate_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:4 + + - name: Network settings for I/O module + ome_device_mgmt_network: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: IOM1234 + ipv4_configuration: + enable_ipv4: true + enable_dhcp: false + static_ip_address: 192.168.0.2 + static_subnet_mask: 255.255.254.0 + static_gateway: 192.168.0.3 + ipv6_configuration: + enable_ipv6: true + enable_auto_configuration: false + static_ip_address: 2626:f2f2:f081:9:1c1c:f1f1:4747:1 + static_prefix_length: 10 + static_gateway: ffff::2607:f2b1:f081:9 + dns_server_settings: + preferred_dns_server: 192.168.0.4 + alternate_dns_server1: 192.168.0.5 + + - name: Management VLAN configuration of chassis using device id + ome_device_mgmt_network: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + device_id: 12345 + management_vlan: + enable_vlan: true + vlan_id: 2345 + dns_configuration: + register_with_dns: false
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_network_services.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_network_services.yml new file mode 100644 index 00000000..0a47d2dd --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_network_services.yml @@ -0,0 +1,59 @@ +--- +- hosts: ome + connection: local + name: OpenManage Ansible Modules for network services settings. + gather_facts: false + collections: dellemc.openmanage + + tasks: + + - name: Update network services settings of a chassis using the device ID. + ome_device_power_settings: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_id: 25011 + snmp_settings: + enabled: true + port_number: 161 + community_name: public + ssh_settings: + enabled: false + remote_racadm_settings: + enabled: false + tags: snmp-settings + + - name: Update network services settings of a chassis using the device service tag. + ome_device_power_settings: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: GHRT2RL + snmp_settings: + enabled: false + ssh_settings: + enabled: true + port_number: 22 + max_sessions: 1 + max_auth_retries: 3 + idle_timeout: 1 + remote_racadm_settings: + enabled: false + tags: ssh-settings + + - name: Update network services settings of the host chassis. 
+ ome_device_power_settings: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_id: 25012 + snmp_settings: + enabled: false + ssh_settings: + enabled: false + remote_racadm_settings: + enabled: true + tags: racadm-settings diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_power_settings.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_power_settings.yml new file mode 100644 index 00000000..4b68a29b --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_power_settings.yml @@ -0,0 +1,54 @@ +--- +- hosts: ome + connection: local + name: OpenManage Ansible Modules + gather_facts: false + collections: dellemc.openmanage + + tasks: + + - name: Update power configuration settings of a chassis using the device ID. + ome_device_power_settings: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_id: 25011 + power_configuration: + enable_power_cap: true + power_cap: 3424 + tags: power-config + + - name: Update redundancy configuration settings of a chassis using the device service tag. + ome_device_power_settings: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: GHRT2RL + redundancy_configuration: + redundancy_policy: GRID_REDUNDANCY + tags: redundancy-config + + - name: Update hot spare configuration settings of a chassis using device ID. + ome_device_power_settings: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_id: 25012 + hot_spare_configuration: + enable_hot_spare: true + primary_grid: GRID_1 + tags: hostspare-config + + - name: Update power configuration settings of a host chassis. 
+ ome_device_power_settings: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + power_configuration: + enable_power_cap: true + power_cap: 3425 + tags: power-config-chassis diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_quick_deploy.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_quick_deploy.yml new file mode 100644 index 00000000..71a07e68 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_quick_deploy.yml @@ -0,0 +1,66 @@ +--- +- hosts: ome + connection: local + name: OpenManage Ansible Modules for Quick Deploy settings. + gather_facts: false + collections: dellemc.openmanage + + tasks: + + - name: Configure server Quick Deploy settings of the chassis using device ID. + ome_device_quick_deploy: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_id: 25011 + setting_type: ServerQuickDeploy + quick_deploy_options: + password: "password" + ipv4_enabled: True + ipv4_network_type: Static + ipv4_subnet_mask: 255.255.255.0 + ipv4_gateway: 192.168.0.1 + ipv6_enabled: True + ipv6_network_type: Static + ipv6_prefix_length: 1 + ipv6_gateway: "::" + slots: + - slot_id: 1 + slot_ipv4_address: 192.168.0.2 + slot_ipv6_address: "::" + vlan_id: 1 + - slot_id: 2 + slot_ipv4_address: 192.168.0.3 + slot_ipv6_address: "::" + vlan_id: 2 + tags: server-quick-deploy + + - name: Configure server Quick Deploy settings of the chassis using device service tag. 
+ ome_device_quick_deploy: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: GHRT2RL + setting_type: IOMQuickDeploy + quick_deploy_options: + password: "password" + ipv4_enabled: True + ipv4_network_type: Static + ipv4_subnet_mask: 255.255.255.0 + ipv4_gateway: 192.168.0.1 + ipv6_enabled: True + ipv6_network_type: Static + ipv6_prefix_length: 1 + ipv6_gateway: "::" + slots: + - slot_id: 1 + slot_ipv4_address: 192.168.0.2 + slot_ipv6_address: "::" + vlan_id: 1 + - slot_id: 2 + slot_ipv4_address: 192.168.0.3 + slot_ipv6_address: "::" + vlan_id: 2 + tags: iom-quick-deploy diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_devices.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_devices.yml new file mode 100644 index 00000000..ba93eb00 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_devices.yml @@ -0,0 +1,60 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible device operations. 
+ gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Refresh Inventory + ome_devices: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_action: refresh_inventory + device_service_tags: + - 2HB7NX2 + + - name: Clear iDRAC job queue + ome_devices: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_action: clear_idrac_job_queue + device_service_tags: + - 2HB7NX2 + + - name: Reset iDRAC using the service tag + ome_devices: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_action: reset_idrac + device_service_tags: + - 2H7HNX2 + + - name: Remove devices using servicetags + ome_devices: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: absent + device_service_tags: + - SVCTAG1 + - SVCTAF2 + + - name: Remove devices using IDs + ome_devices: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: absent + device_ids: + - 10235 diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_diagnostics.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_diagnostics.yml new file mode 100644 index 00000000..b5f0fc97 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_diagnostics.yml @@ -0,0 +1,72 @@ +--- +- hosts: ome + connection: local + name: Dell EMC OpenManage Ansible diagnostics operation. 
+ gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Application log extraction using CIFS share location + ome_diagnostics: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + share_type: CIFS + share_address: "{{ share_address }}" + share_user: "{{ share_username }}" + share_password: "{{ share_password }}" + share_name: "{{ share_name }}" + log_type: application + mask_sensitive_info: false + test_connection: true + tags: app-cifs-log + + - name: Application log extraction using NFS share location + ome_diagnostics: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + share_address: "{{ share_address }}" + share_type: NFS + share_name: "{{ share_name }}" + log_type: application + mask_sensitive_info: true + test_connection: true + tags: app-nfs-log + + - name: Support assist log extraction using CIFS share location + ome_diagnostics: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + share_address: "{{ share_address }}" + share_user: "{{ share_username }}" + share_password: "{{ share_password }}" + share_name: "{{ share_name }}" + share_type: CIFS + log_type: support_assist_collection + device_ids: + - 10011 + - 10022 + log_selectors: [OS_LOGS] + test_connection: true + tags: tsr-cifs-log + + - name: Support assist log extraction using NFS share location + ome_diagnostics: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + share_address: "{{ share_address }}" + share_type: NFS + share_name: "{{ share_name }}" + log_type: support_assist_collection + device_group_name: group_name + test_connection: true + tags: tsr-nfs-log diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_discovery.yml 
b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_discovery.yml new file mode 100644 index 00000000..1a16e328 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_discovery.yml @@ -0,0 +1,189 @@ +--- +- hosts: ome + connection: local + name: Dell EMC OpenManage Ansible discovery operations. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Discover servers in a range + ome_discovery: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + discovery_job_name: "Discovery_server_1" + discovery_config_targets: + - network_address_detail: + - 192.96.24.1-192.96.24.255 + device_types: + - SERVER + wsman: + username: user + password: password + tags: + - server_discovery + + - name: Discover chassis in a range + ome_discovery: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + discovery_job_name: "Discovery_chassis_1" + discovery_config_targets: + - network_address_detail: + - 192.96.24.1-192.96.24.255 + device_types: + - CHASSIS + wsman: + username: user + password: password + tags: + - chassis_discovery + + - name: Discover switches in a range + ome_discovery: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + discovery_job_name: "Discover_switch_1" + discovery_config_targets: + - network_address_detail: + - 192.96.24.1-192.96.24.255 + device_types: + - NETWORK SWITCH + snmp: + community: snmp_creds + tags: + - switch_discovery + + - name: Discover storage in a range + ome_discovery: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + discovery_job_name: "Discover_storage_1" + discovery_config_targets: + - network_address_detail: + - 192.96.24.1-192.96.24.255 + device_types: + - STORAGE + storage: + username: user + password: password + snmp: + 
community: community_str + tags: + - storage_discovery + + - name: Delete a discovery job + ome_discovery: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + discovery_job_name: "Discovery-123" + tags: + - delete_discovery + + - name: Schedule the discovery of multiple devices ignoring partial failure and enable trap to receive alerts + ome_discovery: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + discovery_job_name: "Discovery-123" + discovery_config_targets: + - network_address_detail: + - 192.96.24.1-192.96.24.255 + - 192.96.0.0/24 + - 192.96.26.108 + device_types: + - SERVER + - CHASSIS + - STORAGE + - NETWORK SWITCH + wsman: + username: wsman_user + password: wsman_pwd + redfish: + username: redfish_user + password: redfish_pwd + snmp: + community: snmp_community + - network_address_detail: + - 192.96.25.1-192.96.25.255 + - ipmihost + - esxiserver + - sshserver + device_types: + - SERVER + ssh: + username: ssh_user + password: ssh_pwd + vmware: + username: vm_user + password: vmware_pwd + ipmi: + username: ipmi_user + password: ipmi_pwd + schedule: RunLater + cron: "0 0 9 ? 
* MON,WED,FRI *" + ignore_partial_failure: True + trap_destination: True + community_string: True + email_recipient: test_email@company.com + tags: + - schedule_discovery + + - name: Discover servers with ca check enabled + ome_discovery: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + discovery_job_name: "Discovery_server_ca1" + discovery_config_targets: + - network_address_detail: + - 192.96.24.108 + device_types: + - SERVER + wsman: + username: user + password: password + ca_check: True + certificate_data: "{{ lookup('ansible.builtin.file', '/path/to/certificate_data_file') }}" + tags: + - server_ca_check + + - name: Discover chassis with ca check enabled data + ome_discovery: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + discovery_job_name: "Discovery_chassis_ca1" + discovery_config_targets: + - network_address_detail: + - 192.96.24.108 + device_types: + - CHASSIS + redfish: + username: user + password: password + ca_check: True + certificate_data: "-----BEGIN CERTIFICATE-----\r\n + ABCDEFGHIJKLMNOPQRSTUVWXYZaqwertyuiopasdfghjklzxcvbnmasdasagasvv\r\n + ABCDEFGHIJKLMNOPQRSTUVWXYZaqwertyuiopasdfghjklzxcvbnmasdasagasvv\r\n + ABCDEFGHIJKLMNOPQRSTUVWXYZaqwertyuiopasdfghjklzxcvbnmasdasagasvv\r\n + aqwertyuiopasdfghjklzxcvbnmasdasagasvv=\r\n + -----END CERTIFICATE-----" + tags: + - chassis_ca_check_data
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_domain_user_groups.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_domain_user_groups.yml new file mode 100644 index 00000000..7229f638 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_domain_user_groups.yml @@ -0,0 +1,59 @@ +--- +- hosts: ome + connection: local + name: Dell EMC OpenManage Ansible AD directory user group operation. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + + - name: Create Active Directory user groups. + ome_domain_user_groups: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: present + group_name: account operators + directory_name: directory_name + role: administrator + domain_username: username@domain + domain_password: domain_password + tags: user-group-add + + - name: Create Active Directory user groups with different domain format. + ome_domain_user_groups: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: present + group_name: account operators + directory_name: directory_name + role: administrator + domain_username: domain\\username + domain_password: domain_password + tags: user-group-add-domain + + - name: Update Active Directory user groups. + ome_domain_user_groups: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: present + group_name: account operators + role: chassis administrator + tags: user-group-update + + - name: Remove Active Directory user groups. 
+ ome_domain_user_groups: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + group_name: "Administrators" + tags: user-group-remove diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_group_device_action.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_group_device_action.yml new file mode 100644 index 00000000..08b03786 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_group_device_action.yml @@ -0,0 +1,69 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible group device operations. + gather_facts: False + vars: + group_name: Dell iDRAC Servers + device_action: refresh_inventory #other options are clear_idrac_job_queue, reset_idrac + validate_certs: True + ca_path: "/path/to/ca_cert.pem" + + tasks: + - name: Retrieve group ID based on group name. + ansible.builtin.uri: + url: "https://{{ hostname }}/api/GroupService/Groups?Name={{ group_name }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200 + return_content: yes + validate_certs: "{{ validate_certs }}" + ca_path: "{{ ca_path }}" + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: group_id + + - name: Assign group ID to a variable. + set_fact: + group_id_value: "{{ group_id.json.value[0].Id }}" + + - name: Retrieve all devices under the group ID. + ansible.builtin.uri: + url: "https://{{ hostname }}/api/GroupService/Groups({{ group_id_value }})/AllLeafDevices" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200 + return_content: yes + validate_certs: "{{ validate_certs }}" + ca_path: "{{ ca_path }}" + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: all_devices + + - name: Empty list to store device IDs. 
+ set_fact: + devices_list: [] + + - name: Add devices retrieved from a group to the list. + set_fact: + devices_list: "{{ devices_list + [item.Id] }}" + with_items: + - "{{ all_devices.json.value }}" + + - name: Perform device action tasks on devices. + dellemc.openmanage.ome_devices: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + validate_certs: "{{ validate_certs }}" + ca_path: "{{ ca_path }}" + device_action: "{{ device_action }}" + device_ids: "{{ devices_list }}" diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_groups.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_groups.yml new file mode 100644 index 00000000..027a53d0 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_groups.yml @@ -0,0 +1,57 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible Group configuration. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Create a new device group + ome_groups: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + name: "group 1" + description: "Group 1 description" + parent_group_name: "group parent 1" + tags: + - create_group + + - name: Modify a device group using the group ID + ome_groups: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + group_id: 1234 + description: "Group description updated" + parent_group_name: "group parent 2" + tags: + - modify_group + + - name: Delete a device group using the device group name + ome_groups: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: absent + name: "group 1" + tags: + - delete_name + + - name: Delete multiple device groups using the group IDs + ome_groups: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password 
}}" + ca_path: "/path/to/ca_cert.pem" + state: absent + group_id: + - 1234 + - 5678 + tags: + - delete_ids diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_identity_pool.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_identity_pool.yml new file mode 100644 index 00000000..b5d960ca --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_identity_pool.yml @@ -0,0 +1,134 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible identity pool operations. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: "Create an identity pool using ethernet, FCoE, iSCSI and FC settings." + ome_identity_pool: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: present + pool_name: "pool1" + pool_description: "Identity pool with Ethernet, FCoE, ISCSI and FC settings" + ethernet_settings: + starting_mac_address: "50:50:50:50:50:00" + identity_count: 60 + fcoe_settings: + starting_mac_address: "aabb.ccdd.7070" + identity_count: 75 + iscsi_settings: + starting_mac_address: "60:60:60:60:60:00" + identity_count: 30 + initiator_config: + iqn_prefix: "iqn.myprefix." + initiator_ip_pool_settings: + ip_range: "10.33.0.1-10.33.0.255" + subnet_mask: "255.255.255.0" + gateway: "192.168.4.1" + primary_dns_server: "10.8.8.8" + secondary_dns_server: "8.8.8.8" + fc_settings: + starting_address: "10-10-10-10-10-10" + identity_count: 45 + tags: create1 + + - name: "Create an identity pool using only ethernet settings." 
+ ome_identity_pool: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + pool_name: "pool2" + pool_description: "Identity pool with ethernet" + ethernet_settings: + starting_mac_address: "aa-bb-cc-dd-ee-aa" + identity_count: 80 + tags: create2 + + - name: "Create an identity pool using only iSCSI settings" + ome_identity_pool: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + pool_name: "pool3" + pool_description: "Identity pool with iscsi" + iscsi_settings: + starting_mac_address: "10:10:10:10:10:00" + identity_count: 30 + initiator_config: + iqn_prefix: "iqn.myprefix." + initiator_ip_pool_settings: + ip_range: "20.33.0.1-20.33.0.255" + subnet_mask: "255.255.255.0" + gateway: "192.168.4.1" + primary_dns_server: "10.8.8.8" + secondary_dns_server: "8.8.8.8" + tags: create3 + + - name: "Modify an identity pool using FC settings." + ome_identity_pool: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + pool_name: "pool2" + pool_description: "Identity pool with fc_settings" + fc_settings: + starting_address: "40:40:40:40:40:22" + identity_count: 48 + tags: modify1 + + - name: "Modify an identity pool." + ome_identity_pool: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + pool_name: "pool1" + new_pool_name: "pool_new" + pool_description: "modifying identity pool with ethernet and fcoe settings" + ethernet_settings: + starting_mac_address: "90-90-90-90-90-90" + identity_count: 61 + fcoe_settings: + starting_mac_address: "aabb.ccdd.5050" + identity_count: 77 + tags: modify2 + + - name: "Modify an identity pool using iSCSI and FC settings." 
+ ome_identity_pool: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + pool_name: "pool_new" + new_pool_name: "pool_new2" + pool_description: "modifying identity pool with iscsi and fc settings" + iscsi_settings: + identity_count: 99 + initiator_config: + iqn_prefix: "iqn1.myprefix2." + initiator_ip_pool_settings: + gateway: "192.168.4.5" + fc_settings: + starting_address: "10:10:10:10:10:10" + identity_count: 98 + tags: modify3 + + - name: "Delete an identity pool" + ome_identity_pool: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + pool_name: "pool1" + tags: delete diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_job_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_job_info.yml new file mode 100644 index 00000000..f90892ad --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_job_info.yml @@ -0,0 +1,35 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible job details. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Get all jobs details. + ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + + - name: Get job details for id. + ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: 12345 + + - name: Get filtered job details. + ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + system_query_options: + top: 2 + skip: 1 + filter: "JobType/Id eq 8"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout.yml new file mode 100644 index 00000000..c9a8db75 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout.yml @@ -0,0 +1,32 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage ansible port breakout configuration. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + + - name: Port breakout configuration. + ome_network_port_breakout: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + target_port: "2HB7NX2:phy-port1/1/11" + breakout_type: "1X40GE" + tags: + - port-config + + - name: Revoke the default breakout configuration. + ome_network_port_breakout: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + target_port: "2HB7NX2:phy-port1/1/11" + breakout_type: "HardwareDefault" + tags: + - port-default diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout_job_traking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout_job_traking.yml new file mode 100644 index 00000000..b94b6b48 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout_job_traking.yml @@ -0,0 +1,37 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage ansible port breakout configuration. + gather_facts: False + vars: + retries_count: 50 + polling_interval: 5 # in seconds + + collections: + - dellemc.openmanage + + tasks: + + - name: Port breakout configuration. 
+ ome_network_port_breakout: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + target_port: "2HB7NX2:phy-port1/1/11" + breakout_type: "1X40GE" + register: result + + - name: "Get job details using job id from port breakout configuration task." + ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: "{{ result.breakout_status.Id }}" + register: job_result + failed_when: job_result.job_info.LastRunStatus.Name == 'Failed' + changed_when: job_result.job_info.LastRunStatus.Name == 'Completed' + until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan.yml new file mode 100644 index 00000000..d92ef99f --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan.yml @@ -0,0 +1,62 @@ +--- +- hosts: ome + connection: local + name: Dell EMC OpenManage Ansible VLAN operations. 
+ gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: "Create a VLAN range" + ome_network_vlan: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: present + name: "vlan1" + description: "VLAN desc" + type: "General Purpose (Bronze)" + vlan_minimum: 35 + vlan_maximum: 40 + tags: create_vlan_range + + - name: "Create a VLAN with a single value" + ome_network_vlan: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: present + name: "vlan2" + description: "VLAN desc" + type: "General Purpose (Bronze)" + vlan_minimum: 127 + vlan_maximum: 127 + tags: create_vlan_single + + - name: "Modify a VLAN" + ome_network_vlan: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: present + name: "vlan1" + new_name: "vlan_gold1" + description: "new description" + type: "General Purpose (Gold)" + vlan_minimum: 45 + vlan_maximum: 50 + tags: modify_vlan + + - name: "Delete a VLAN" + ome_network_vlan: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + name: "vlan1" + tags: delete_vlan diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan_info.yml new file mode 100644 index 00000000..3cf9c3c2 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan_info.yml @@ -0,0 +1,32 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible OpenManage Enterprise network vlan details. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Retrieve information about all network VLANs(s) available in the device. 
+ ome_network_vlan_info: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + + - name: Retrieve information about a network VLAN using the VLAN ID. + ome_network_vlan_info: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + id: 12345 + + - name: Retrieve information about a network VLAN using the VLAN name. + ome_network_vlan_info: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + name: "Network VLAN - 1" diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_info.yml new file mode 100644 index 00000000..87c124b8 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_info.yml @@ -0,0 +1,33 @@ +--- +- hosts: ome + connection: local + name: Dell EMC OpenManage Ansible server interface profile information. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Retrieves the server interface profiles of all the device using device ID. + ome_server_interface_profile_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_id: + - 10001 + - 10002 + tags: + - sip-device-id + + - name: Retrieves the server interface profiles of all the device using device service tag. 
+ ome_server_interface_profile_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: + - 6GHH6H2 + - 6KHH6H3 + tags: + - sip-service-tag diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_workflow.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_workflow.yml new file mode 100644 index 00000000..485a1a24 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_workflow.yml @@ -0,0 +1,125 @@ +--- +- hosts: ome + connection: local + name: Dell EMC OpenManage Ansible server interface profile workflow. + gather_facts: False + vars: + retries_count: 100 + polling_interval: 10 #in seconds + src_service_tag: 7GHH6H1 + + collections: + - dellemc.openmanage + + tasks: + + - name: Create a smart fabric. + ome_smart_fabric: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: present + name: "fabric1" + description: "fabric desc" + fabric_design: "2xMX9116n_Fabric_Switching_Engines_in_same_chassis" + primary_switch_service_tag: "6H7J6Z2" + secondary_switch_service_tag: "59HW8X2" + override_LLDP_configuration: "Enabled" + register: fabric_result + + - name: "sleep for 300 seconds and continue with play" + wait_for: + timeout: 300 + when: fabric_result.changed == True + + - name: Create a template from a reference device service tag. + ome_template: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: "{{ src_service_tag }}" + attributes: + Name: "New_Template_2" + Description: "New Template description" + register: result + failed_when: "'return_id' not in result" + + - name: "Get the job id using return id from template." 
+ ome_template_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + template_id: "{{ result.return_id }}" + register: facts_result + + - name: "Get job details using job id from template task." + ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: "{{ facts_result.template_info[hostname].TaskId }}" + register: job_result + failed_when: job_result.job_info.LastRunStatus.Name == 'Failed' + changed_when: job_result.job_info.LastRunStatus.Name == 'Completed' + until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" + + - name: Deploy template on multiple devices + dellemc.openmanage.ome_template: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "deploy" + template_id: "{{ result.return_id }}" + device_service_tag: + - 6GHH6H1 + - 6GHH6H2 + register: deploy_result + + - name: "sleep for 10 seconds and continue with play" + wait_for: timeout=10 + + - name: "Track the deploy job till completion" + ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: "{{ deploy_result.return_id }}" + register: deploy_job_result + failed_when: "'job_info' not in deploy_job_result" + until: deploy_job_result.job_info.LastRunStatus.Name == 'Completed' or deploy_job_result.job_info.LastRunStatus.Name == 'Failed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" + + - name: Modify Server Interface Profile for the server using the service tag. 
+ ome_server_interface_profiles: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: + - 6GHH6H2 + nic_teaming: NoTeaming + nic_configuration: + - nic_identifier: NIC.Mezzanine.1A-1-1 + team: no + untagged_network: 2 + tagged_networks: + names: + - vlan + + - name: Retrieves the server interface profiles of all the device using device service tag. + ome_server_interface_profile_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: + - 6GHH6H2 diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profiles.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profiles.yml new file mode 100644 index 00000000..c003b714 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profiles.yml @@ -0,0 +1,57 @@ +--- +- hosts: omem + connection: local + name: Dell OpenManage Ansible server interface profiles configuration. 
+ gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Modify Server Interface Profile for the server using the service tag + ome_server_interface_profiles: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: + - SVCTAG1 + - SVCTAG2 + nic_teaming: LACP + nic_configuration: + - nic_identifier: NIC.Mezzanine.1A-1-1 + team: no + untagged_network: 2 + tagged_networks: + names: + - vlan1 + - nic_identifier: NIC.Mezzanine.1A-2-1 + team: yes + untagged_network: 3 + tagged_networks: + names: + - range120-125 + + - name: Modify Server Interface Profile for the server using the id + ome_server_interface_profiles: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + device_id: + - 34523 + - 48999 + nic_teaming: NoTeaming + nic_configuration: + - nic_identifier: NIC.Mezzanine.1A-1-1 + team: no + untagged_network: 2 + tagged_networks: + names: + - vlan2 + - nic_identifier: NIC.Mezzanine.1A-2-1 + team: yes + untagged_network: 3 + tagged_networks: + names: + - range120-125 diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric.yml new file mode 100644 index 00000000..3813458a --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric.yml @@ -0,0 +1,47 @@ +--- +- hosts: ome + connection: local + name: Dell EMC OpenManage Ansible smart fabric operations. 
+ gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: "Create a smart fabric" + ome_smart_fabric: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: present + name: "fabric1" + description: "fabric desc" + fabric_design: "2xMX9116n_Fabric_Switching_Engines_in_different_chassis" + primary_switch_service_tag: "SVTG123" + secondary_switch_service_tag: "PXYT456" + override_LLDP_configuration: "Enabled" + tags: create_smart_fabric + + - name: "Modify a smart fabric" + ome_smart_fabric: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: present + name: "fabric1" + new_name: "fabric_gold1" + description: "new description" + tags: modify_smart_fabric + + + - name: "Delete a smart fabric" + ome_smart_fabric: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + name: "fabric1" + tags: delete_smart_fabric diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric_uplink.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric_uplink.yml new file mode 100644 index 00000000..88b5cc62 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric_uplink.yml @@ -0,0 +1,119 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible smart fabric uplink configuration. 
+ gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: "Create a Uplink" + ome_smart_fabric_uplink: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + fabric_name: "fabric1" + name: "uplink1" + description: "CREATED from OMAM" + uplink_type: "Ethernet" + ufd_enable: "Enabled" + primary_switch_service_tag: "ABC1234" + primary_switch_ports: + - ethernet1/1/13 + - ethernet1/1/14 + secondary_switch_service_tag: "XYZ1234" + secondary_switch_ports: + - ethernet1/1/13 + - ethernet1/1/14 + tagged_networks: + - vlan1 + - vlan3 + untagged_network: vlan2 + tags: create_uplink + + - name: "modify a existing uplink1" + ome_smart_fabric_uplink: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + fabric_name: "fabric1" + name: "uplink1" + new_name: "uplink2" + description: "Modified from OMAM" + uplink_type: "Ethernet" + ufd_enable: "Disabled" + primary_switch_service_tag: "DEF1234" + primary_switch_ports: + - ethernet1/2/13 + - ethernet1/2/14 + secondary_switch_service_tag: "TUV1234" + secondary_switch_ports: + - ethernet1/2/13 + - ethernet1/2/14 + tagged_networks: + - vlan11 + - vlan33 + untagged_network: vlan22 + tags: modify_uplink + + - name: "Delete a Uplink" + ome_smart_fabric_uplink: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + fabric_name: "fabric1" + name: "uplink1" + tags: delete_uplink + + - name: "Modify the Uplink name" + ome_smart_fabric_uplink: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + fabric_name: "fabric1" + name: "uplink1" + new_name: "uplink2" + tags: modify_uplink_name + + - name: "Modify a Uplink ports" + ome_smart_fabric_uplink: + hostname: "{{hostname}}" + username: "{{username}}" + 
password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + fabric_name: "fabric1" + name: "uplink1" + description: "uplink ports modified" + primary_switch_service_tag: "ABC1234" + primary_switch_ports: + - ethernet1/1/6 + - ethernet1/1/7 + secondary_switch_service_tag: "XYZ1234" + secondary_switch_ports: + - ethernet1/1/9 + - ethernet1/1/10 + tags: modify_ports + + - name: "Modify Uplink networks" + ome_smart_fabric_uplink: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + fabric_name: "fabric1" + name: "create1" + description: "uplink networks modified" + tagged_networks: + - vlan4 + tags: modify_networks diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_template_identity_pool.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_template_identity_pool.yml new file mode 100644 index 00000000..433954aa --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_template_identity_pool.yml @@ -0,0 +1,31 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible template identity pool attach and detach operation. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + + - name: Attach an identity pool to a template. + ome_template_identity_pool: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + template_name: template_name + identity_pool_name: identity_pool_name + tags: + - attach + + - name: Detach an identity pool from a template. + ome_template_identity_pool: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + template_name: template_name + tags: + - detach
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate.yml new file mode 100644 index 00000000..517ff118 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate.yml @@ -0,0 +1,51 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible - OME Power state operations. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Power state operation based on device id. + ome_powerstate: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_id: 11111 + power_state: "off" + + - name: Power state operation based on device service tag. + ome_powerstate: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: "KLBR111" + power_state: "on" + + - name: Power state operation based on list of device ids. + ome_powerstate: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_id: "{{ item.device_id }}" + power_state: "{{ item.state }}" + with_items: + - { "device_id": 11111, "state": "on" } + - { "device_id": 22222, "state": "off" } + + - name: Power state operation based on list of device service tags. + ome_powerstate: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: "{{ item.service_tag }}" + power_state: "{{ item.state }}" + with_items: + - { "service_tag": "KLBR111", "state": "on" } + - { "service_tag": "KLBR222", "state": "off" }
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate_with_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate_with_job_tracking.yml new file mode 100644 index 00000000..8393992a --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate_with_job_tracking.yml @@ -0,0 +1,36 @@ +--- +- hosts: ome + vars: + retries_count: 5 + polling_interval: 5 #in seconds + connection: local + name: "OME - Power state management job tracking." + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: "Power state operation based on device id" + ome_powerstate: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + power_state: "off" + device_id: 11111 + register: result + failed_when: "'job_status' not in result" + + - name: "Get job details using job id from power state operation." + ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: "{{result.job_status.Id}}" + register: job_result + failed_when: "'job_info' not in job_result" + until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile.yml new file mode 100644 index 00000000..14d43e6a --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile.yml @@ -0,0 +1,212 @@ +--- +- hosts: ome + connection: local + name: Dell EMC OpenManage Ansible profile operations. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Create two profiles from a template + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: create + template_name: "template 1" + name_prefix: "omam_profile" + number_of_profiles: 2 + tags: + - create_profile + + - name: Create profile with NFS share + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: create + template_name: "template 1" + name_prefix: "omam_profile" + number_of_profiles: 1 + boot_to_network_iso: + boot_to_network: True + share_type: "NFS" + share_ip: "192.168.0.1" + iso_path: "/path/to/my_iso.iso" + iso_timeout: 8 + tags: + - create_profile_nfs + + - name: Create profile with CIFS share + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: create + template_name: "template 1" + name_prefix: "omam_profile" + number_of_profiles: 1 + boot_to_network_iso: + boot_to_network: True + share_type: CIFS + share_ip: "192.168.0.2" + share_user: "username" + share_password: "password" + workgroup: "workgroup" + iso_path: "\\path\\to\\my_iso.iso" + iso_timeout: 8 + tags: + - create_profile_cifs + + - name: Modify profile name with NFS share and attributes + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: 
"/path/to/ca_cert.pem" + command: modify + name: "Profile 00001" + new_name: "modified profile" + description: "new description" + boot_to_network_iso: + boot_to_network: True + share_type: NFS + share_ip: "192.168.0.1" + iso_path: "/path/to/my_iso.iso" + iso_timeout: 8 + attributes: + Attributes: + - Id: 4506 + Value: "server attr 1" + IsIgnored: true + - Id: 4507 + Value: "server attr 2" + IsIgnored: true + - DisplayName: 'System, Server Topology, ServerTopology 1 Aisle Name' + Value: Aisle 5 + IsIgnored: false + tags: + - modify_profile + + - name: Delete using profile name + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "delete" + name: "Profile 00003" + tags: + - delete_profile_name + + - name: Delete using filter + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "delete" + filters: + SelectAll: True + Filters: =contains(ProfileName,'Profile 00002') + tags: + - delete_filter + + - name: Delete using profile list filter + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "delete" + filters: + ProfileIds: + - 17123 + - 12124 + tags: + - delete_profile_ids + + - name: Assign profile name with network share + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: assign + name: "Profile 00001" + device_id: 12456 + boot_to_network_iso: + boot_to_network: True + share_type: NFS + share_ip: "192.168.0.1" + iso_path: "/path/to/my_iso.iso" + iso_timeout: 8 + attributes: + Attributes: + - Id: 4506 + Value: "server attr 1" + IsIgnored: true + Options: + ShutdownType: 0 + TimeToWaitBeforeShutdown: 300 + EndHostPowerState: 1 + StrictCheckingVlan: True + Schedule: + RunNow: True + RunLater: False + tags: + - assign_profile 
+ + - name: Unassign using profile name + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "unassign" + name: "Profile 00003" + tags: + - unassign_profile_name + + - name: "Unassign using filters" + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "unassign" + filters: + SelectAll: True + Filters: =contains(ProfileName,'Profile 00003') + tags: + - unassign_filter + + - name: Unassign using filter + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "unassign" + filters: + ProfileIds: + - 17123 + - 16123 + tags: + - unassign_profile_list + + - name: Migrate profile + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "migrate" + name: "Profile 0001" + device_id: 12456 + tags: + - migrate_profile
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_assign_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_assign_job_tracking.yml new file mode 100644 index 00000000..d4c9c772 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_assign_job_tracking.yml @@ -0,0 +1,47 @@ +--- +- hosts: ome + connection: local + name: Dell EMC OpenManage Ansible profile operations. + gather_facts: False + vars: + retries_count: 120 + polling_interval: 30 # 30 seconds x 120 times = 1 hour + failed_states: ['Failed', 'Warning', 'Aborted', 'Paused', 'Stopped', + 'Canceled'] + completed_states: ['Completed', 'Failed', 'Warning', 'Aborted', 'Paused', + 'Stopped', 'Canceled'] + + collections: + - dellemc.openmanage + + tasks: + - name: Assign a profile to target + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "assign" + name: "Profile 00001" + device_id: 12456 + register: result + + - name: End play when no job_id in result + meta: end_play + when: + - result.changed == false + - "'job_id' not in result" + + - name: Get job details using job id + ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: "{{ result.job_id }}" + register: job_result + failed_when: job_result.job_info.LastRunStatus.Name in "{{ failed_states }}" + changed_when: job_result.job_info.LastRunStatus.Name == 'Completed' + until: job_result.job_info.LastRunStatus.Name in "{{ completed_states }}" + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_migrate_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_migrate_job_tracking.yml new file mode 100644 index 
00000000..ae7f732b --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_migrate_job_tracking.yml @@ -0,0 +1,48 @@ +--- +- hosts: ome + connection: local + name: Dell EMC OpenManage Ansible profile operations. + gather_facts: False + vars: + retries_count: 120 + polling_interval: 30 # 30 seconds x 120 times = 1 hour + failed_states: ['Failed', 'Warning', 'Aborted', 'Paused', 'Stopped', + 'Canceled'] + completed_states: ['Completed', 'Failed', 'Warning', 'Aborted', 'Paused', + 'Stopped', 'Canceled'] + + collections: + - dellemc.openmanage + + tasks: + + - name: Migrate a profile + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "migrate" + name: "Profile 00001" + device_id: 12456 + register: result + + - name: End play when no job_id in result + meta: end_play + when: + - result.changed == false + - "'job_id' not in result" + + - name: Get job details using job id + ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: "{{ result.job_id }}" + register: job_result + failed_when: job_result.job_info.LastRunStatus.Name in "{{ failed_states }}" + changed_when: job_result.job_info.LastRunStatus.Name == 'Completed' + until: job_result.job_info.LastRunStatus.Name in "{{ completed_states }}" + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_unassign_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_unassign_job_tracking.yml new file mode 100644 index 00000000..b1a21312 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_unassign_job_tracking.yml @@ -0,0 +1,47 @@ +--- +- hosts: ome + connection: local + name: Dell EMC OpenManage Ansible profile operations. 
+ gather_facts: False + vars: + retries_count: 120 + polling_interval: 30 # 30 seconds x 120 times = 1 hour + failed_states: ['Failed', 'Warning', 'Aborted', 'Paused', 'Stopped', + 'Canceled'] + completed_states: ['Completed', 'Failed', 'Warning', 'Aborted', 'Paused', + 'Stopped', 'Canceled'] + + collections: + - dellemc.openmanage + + tasks: + + - name: Unassign using profile name + ome_profile: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "unassign" + name: "Profile 00003" + register: result + + - name: End play when no job_id in result + meta: end_play + when: + - result.changed == false + - "'job_id' not in result" + + - name: Get job details using job id + ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: "{{ result.job_id }}" + register: job_result + failed_when: job_result.job_info.LastRunStatus.Name in "{{ failed_states }}" + changed_when: job_result.job_info.LastRunStatus.Name == 'Completed' + until: job_result.job_info.LastRunStatus.Name in "{{ completed_states }}" + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template.yml new file mode 100644 index 00000000..58ac15ff --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template.yml @@ -0,0 +1,338 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible device Template service. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: "Create a template from a reference device." 
+ ome_template: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + device_id: 25123 + attributes: + Name: "New Template" + Description: "New Template description" + + - name: "Modify template name, description, and attribute value." + ome_template: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "modify" + template_id: 12 + attributes: + Name: "New Custom Template" + Description: "Custom Template Description" + # Attributes to be modified in the template. + # For information on any attribute id, use API /TemplateService/Templates(Id)/Views(Id)/AttributeViewDetails + # This section is optional + Attributes: + - Id: 1234 + Value: "Test Attribute" + IsIgnored: false + + - name: Modify template name, description, and attribute using detailed view + ome_template: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "modify" + template_id: 12 + attributes: + Name: "New Custom Template" + Description: "Custom Template Description" + Attributes: + # Enter the comma separated string as appearing in the Detailed view on GUI + # NIC -> NIC.Integrated.1-1-1 -> NIC Configuration -> Wake On LAN1 + - DisplayName: 'NIC, NIC.Integrated.1-1-1, NIC Configuration, Wake On LAN' + Value: Enabled + IsIgnored: false + # System -> LCD Configuration -> LCD 1 User Defined String for LCD + - DisplayName: 'System, LCD Configuration, LCD 1 User Defined String for LCD' + Value: LCD str by OMAM + IsIgnored: false + + - name: "Deploy template on multiple devices " + ome_template: + hostname: "192.168.0.1" + username: "username" + password: "password" + ca_path: "/path/to/ca_cert.pem" + command: "deploy" + template_id: 12 + device_id: + - 12765 + - 10173 + device_service_tag: + - 'SVTG123' + - 'SVTG456' + + - name: Deploy template on groups + ome_template: + hostname: "{{hostname}}" + 
username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "deploy" + template_id: 12 + device_group_names: + - server_group_1 + - server_group_2 + + - name: "Deploy template on multiple devices along attributes modification for target device" + ome_template: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "deploy" + template_id: 12 + device_id: + - 12765 + - 10173 + device_service_tag: + - 'SVTG123' + attributes: + # Device specific attributes to be modified during deployment. + # For information on any attribute id, use API /TemplateService/Templates(Id)/Views(Id)/AttributeViewDetails + # This section is optional + Attributes: + # specific device where attribute to be modified at deployment run-time. + # The DeviceId should be mentioned above in the 'device_id' section. + # Service tags not allowed. + - DeviceId: 12765 + Attributes: + - Id: 15645 + Value: "0.0.0.0" + IsIgnored: false + - DeviceId: 10173 + Attributes: + - Id: 18968, + Value: "hostname-1" + IsIgnored: false + + - name: "Deploy template and Operating System (OS) on multiple devices" + ome_template: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "deploy" + template_id: 12 + device_id: + - 12765 + device_service_tag: + - 'SVTG123' + attributes: + # Include this to install OS on the devices. + # This section is optional + NetworkBootIsoModel: + BootToNetwork: true + ShareType: "NFS" + IsoTimeout: 1 # allowable values(1,2,4,8,16) in hours + IsoPath: "/home/iso_path/filename.iso" + ShareDetail: + IpAddress: "192.168.0.2" + ShareName: "sharename" + User: "share_user" + Password: "share_password" + Options: + EndHostPowerState: 1 + ShutdownType: 0 + TimeToWaitBeforeShutdown: 300 + Schedule: + RunLater: true + RunNow: false + + - name: "Deploy template on multiple devices and changes the device-level attributes. 
After the template is deployed,
+        install OS using its image."
+      ome_template:
+        hostname: "{{hostname}}"
+        username: "{{username}}"
+        password: "{{password}}"
+        ca_path: "/path/to/ca_cert.pem"
+        command: "deploy"
+        template_id: 12
+        device_id:
+          - 12765
+          - 10173
+        device_service_tag:
+          - 'SVTG123'
+          - 'SVTG456'
+        attributes:
+          Attributes:
+            - DeviceId: 12765
+              Attributes:
+                - Id: 15645
+                  Value: "0.0.0.0"
+                  IsIgnored: false
+            - DeviceId: 10173
+              Attributes:
+                - Id: 18968
+                  Value: "hostname-1"
+                  IsIgnored: false
+          NetworkBootIsoModel:
+            BootToNetwork: true
+            ShareType: "NFS"
+            IsoTimeout: 1 # allowable values(1,2,4,8,16) in hours
+            IsoPath: "/home/iso_path/filename.iso"
+            ShareDetail:
+              IpAddress: "192.168.0.2"
+              ShareName: "sharename"
+              User: "share_user"
+              Password: "share_password"
+            Options:
+              EndHostPowerState: 1
+              ShutdownType: 0
+              TimeToWaitBeforeShutdown: 300
+            Schedule:
+              RunLater: true
+              RunNow: false
+
+    - name: "delete template"
+      ome_template:
+        hostname: "{{hostname}}"
+        username: "{{username}}"
+        password: "{{password}}"
+        ca_path: "/path/to/ca_cert.pem"
+        command: "delete"
+        template_id: 12
+
+    - name: "export a template"
+      ome_template:
+        hostname: "{{hostname}}"
+        username: "{{username}}"
+        password: "{{password}}"
+        ca_path: "/path/to/ca_cert.pem"
+        command: "export"
+        template_id: 12
+
+    # Start of example to export template to a local xml file
+    - name: "export template to a local xml file"
+      ome_template:
+        hostname: "{{hostname}}"
+        username: "{{username}}"
+        password: "{{password}}"
+        ca_path: "/path/to/ca_cert.pem"
+        command: "export"
+        template_name: "my_template"
+      register: result
+      tags:
+        - export_xml_to_file
+    - ansible.builtin.copy:
+        content: "{{ result.Content}}"
+        dest: "/path/to/exported_template.xml"
+      tags:
+        - export_xml_to_file
+    # End of example to export template to a local xml file
+
+    - name: "clone a template"
+      ome_template:
+        hostname: "{{hostname}}"
+        username: "{{username}}"
+        password: "{{password}}"
+        ca_path: 
"/path/to/ca_cert.pem" + command: "clone" + template_id: 12 + attributes: + Name: "New Cloned Template Name" + + - name: "import template from XML content" + ome_template: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "import" + attributes: + Name: "Imported Template Name" + # Template Type from TemplateService/TemplateTypes + Type: 2 + # xml string content + Content: "<SystemConfiguration Model=\"PowerEdge R940\" ServiceTag=\"SVCTAG1\" + TimeStamp=\"Tue Sep 24 09:20:57.872551 2019\">\n<Component FQDD=\"AHCI.Slot.6-1\">\n<Attribute + Name=\"RAIDresetConfig\">True</Attribute>\n<Attribute Name=\"RAIDforeignConfig\">Clear</Attribute>\n + </Component>\n<Component FQDD=\"Disk.Direct.0-0:AHCI.Slot.6-1\">\n<Attribute Name=\"RAIDPDState\">Ready + </Attribute>\n<Attribute Name=\"RAIDHotSpareStatus\">No</Attribute>\n</Component>\n + <Component FQDD=\"Disk.Direct.1-1:AHCI.Slot.6-1\">\n<Attribute Name=\"RAIDPDState\">Ready</Attribute>\n + <Attribute Name=\"RAIDHotSpareStatus\">No</Attribute>\n</Component>\n</SystemConfiguration>\n" + + - name: "import template from local XML file" + ome_template: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "import" + attributes: + Name: "Imported Template Name" + Type: 2 + Content: "{{ lookup('ansible.builtin.file', '/path/to/xmlfile') }}" + + - name: "Deploy template and Operating System (OS) on multiple devices." + ome_template: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "deploy" + template_id: 12 + device_id: + - 12765 + device_service_tag: + - 'SVTG123' + attributes: + # Include this to install OS on the devices. 
+ # This section is optional + NetworkBootIsoModel: + BootToNetwork: true + ShareType: "CIFS" + IsoTimeout: 1 # allowable values(1,2,4,8,16) in hours + IsoPath: "/home/iso_path/filename.iso" + ShareDetail: + IpAddress: "192.168.0.2" + ShareName: "sharename" + User: "share_user" + Password: "share_password" + Options: + EndHostPowerState: 1 + ShutdownType: 0 + TimeToWaitBeforeShutdown: 300 + Schedule: + RunLater: true + RunNow: false + + - name: Create a compliance template from reference device + ome_template: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "create" + device_service_tag: "SVTG123" + template_view_type: "Compliance" + attributes: + Name: "Configuration Compliance" + Description: "Configuration Compliance Template" + Fqdds: "BIOS" + + - name: Import a compliance template from XML file + ome_template: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + command: "import" + template_view_type: "Compliance" + attributes: + Name: "Configuration Compliance" + Content: "{{ lookup('ansible.builtin.file', './test.xml') }}" + Type: 2 diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_create_modify_lcd_display.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_create_modify_lcd_display.yml new file mode 100644 index 00000000..40f4c002 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_create_modify_lcd_display.yml @@ -0,0 +1,129 @@ +--- +- hosts: ome + connection: local + name: "Creates a new template from the provided reference server device. + Track the template creation job till completion. + Fetch the Attribute specific to LCD Configuration settings from the attribute view of the created template. + Modify the created template with the user defined LCD string." 
+ gather_facts: False + vars: + retries_count: 50 + polling_interval: 5 + reference_device: "MXL4567" + template_name: "LCD String Deploy Template" + lcd_display_string: "LCD Custom Display Message" + + collections: + - dellemc.openmanage + + tasks: + - name: "create template from the reference server" + ome_template: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_service_tag: "{{ reference_device }}" + attributes: + Name: "{{ template_name }}" + Description: "LCD Template description" + register: result + + - name: "sleep for 30 seconds and continue with play" + wait_for: timeout=30 + + - name: "Fetch the Task ID from the Template Details using the Template ID" + ome_template_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + template_id: "{{ result.return_id }}" + register: template_result + + - name: "Track the Template Creation Job till Completion" + ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: "{{ template_result.template_info[hostname].TaskId }}" + register: job_result + failed_when: "'job_info' not in job_result" + until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" + + - name: "Retrieve the Attribute ID specific to LCD Configuration" + uri: + url: "https://{{ hostname }}/api/TemplateService/Templates({{ result.return_id }})/Views(1)/AttributeViewDetails" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: config_result + + - name: "System Attribute Groups" + set_fact: + 
lcd_fact: "{{ item }}"
+      when:
+        - item.DisplayName=='System'
+      with_items:
+        - "{{ config_result.json.AttributeGroups }}"
+      loop_control:
+        label: "{{ config_result.json.Name }}"
+
+    - name: "LCD System Attributes Groups"
+      set_fact:
+        lcdconfig: "{{ item }}"
+      when:
+        - item.DisplayName=='LCD Configuration'
+      with_items:
+        - "{{ lcd_fact.SubAttributeGroups }}"
+      loop_control:
+        label: "{{ item.DisplayName }}"
+
+    - name: "Retrieve LCD Display Attribute ID"
+      set_fact:
+        lcdattrid: "{{ item.AttributeId }}"
+      when:
+        - item.DisplayName=='LCD 1 User Defined String for LCD'
+      with_items:
+        - "{{ lcdconfig.Attributes }}"
+      loop_control:
+        label: "{{ item.DisplayName }}"
+
+    - name: "Retrieve LCD Config Attribute ID"
+      set_fact:
+        lcdconfigattrid: "{{ item.AttributeId }}"
+      when:
+        - item.DisplayName=='LCD 1 LCD Configuration'
+      with_items:
+        - "{{ lcdconfig.Attributes }}"
+      loop_control:
+        label: "{{ item.DisplayName }}"
+
+    - name: "Modify the created template with Custom LCD String to be displayed"
+      ome_template:
+        hostname: "{{ hostname }}"
+        username: "{{ username }}"
+        password: "{{ password }}"
+        ca_path: "/path/to/ca_cert.pem"
+        state: "modify"
+        template_id: "{{ result.return_id }}"
+        attributes:
+          Name: "{{ template_name }}"
+          Attributes:
+            - Id: "{{ lcdattrid }}"
+              Value: "{{ lcd_display_string }}"
+              IsIgnored: false
+            - Id: "{{ lcdconfigattrid }}"
+              Value: "User Defined"
+              IsIgnored: false
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info.yml new file mode 100644 index 00000000..3fd200c0 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info.yml @@ -0,0 +1,33 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible template inventory details. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Retrieve basic details of all templates. + ome_template_info: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + + - name: Retrieve details of a specific template identified by its template ID. + ome_template_info: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + template_id: "{{template_id}}" + + - name: Get filtered template info based on name. + ome_template_info: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + system_query_options: + filter: "Name eq 'new template'"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info_with_filter.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info_with_filter.yml new file mode 100644 index 00000000..eb040c9c --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info_with_filter.yml @@ -0,0 +1,27 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible device Template service. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: get template with filter option. + register: result + failed_when: "'template_info' not in result or result.template_info['{{hostname}}']['@odata.count'] == 0" + ome_template_info: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + system_query_options: + filter: "Name eq 'template_name'" + - name: get specific template from result + with_subelements: + - "{{ result.template_info }}" + - value + debug: + msg: "{{item.1}}" + when: item.1['Name']=='template_name' diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_lcd_display_string_deploy.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_lcd_display_string_deploy.yml new file mode 100644 index 00000000..afb472fa --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_lcd_display_string_deploy.yml @@ -0,0 +1,46 @@ +--- +- hosts: ome + connection: local + name: + - Deploy this template with desired LCD string on the target servers. + - Track the template deploy operation job till completion. 
+  gather_facts: False
+  vars:
+    retries_count: 50
+    polling_interval: 5
+    template_name: "LCD String Deploy Template"
+    deployable_servicetag:
+      - 'MXL1234'
+      - 'MXL4567'
+
+  collections:
+    - dellemc.openmanage
+
+  tasks:
+    - name: "Deploy Previously created LCD Template"
+      ome_template:
+        hostname: "{{ hostname }}"
+        username: "{{ username }}"
+        password: "{{ password }}"
+        ca_path: "/path/to/ca_cert.pem"
+        state: "deploy"
+        template_name: "{{ template_name }}"
+        device_service_tag: "{{ deployable_servicetag }}"
+      register: result
+      tags:
+        - deploy
+
+    - name: "Track the deploy job till completion"
+      ome_job_info:
+        hostname: "{{ hostname }}"
+        username: "{{ username }}"
+        password: "{{ password }}"
+        ca_path: "/path/to/ca_cert.pem"
+        job_id: "{{ result.return_id }}"
+      register: job_result
+      failed_when: "'job_info' not in job_result"
+      until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed'
+      retries: "{{ retries_count }}"
+      delay: "{{ polling_interval }}"
+      tags:
+        - track_deploy
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_network_vlan.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_network_vlan.yml new file mode 100644 index 00000000..fee07b4e --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_network_vlan.yml @@ -0,0 +1,66 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible template tag and untag. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Tag or untag vlans in template + ome_template_network_vlan: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + template_id: 78 + nic_identifier: NIC Slot 4 + untagged_networks: + - port: 1 + untagged_network_id: 12765 + - port: 2 + untagged_network_name: vlan2 + tagged_networks: + - port: 1 + tagged_network_ids: + - 12767 + - 12768 + - port: 4 + tagged_network_ids: + - 12767 + - 12768 + tagged_network_names: + - vlan3 + - port: 2 + tagged_network_names: + - vlan4 + - vlan1 + tags: + - tag_untag_vlan + + - name: Clear the tagged and untagged vLANs + ome_template_network_vlan: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + template_id: 78 + nic_identifier: NIC Slot 4 + untagged_networks: + # For removing the untagged vLANs for the port 1 and 2 + - port: 1 + untagged_network_id: 0 + - port: 2 + untagged_network_name: 0 + tagged_networks: + # For removing the tagged vLANs for port 1 and 4 + - port: 1 + tagged_network_ids: [] + - port: 4 + tagged_network_ids: [] + tagged_network_names: [] + - port: 2 + tagged_network_names: [] + tags: + - clear_tagged_untagged
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_with_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_with_job_tracking.yml new file mode 100644 index 00000000..9f93bbdf --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_with_job_tracking.yml @@ -0,0 +1,48 @@ +--- +- hosts: ome + vars: + retries_count: 50 + polling_interval: 5 #in seconds + connection: local + name: "OME - Create Template details tracking" + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: "Create template based on device id." + ome_template: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + device_id: 12475 + attributes: + Name: "New Template" + Description: "New Template description" + register: result + failed_when: "'return_id' not in result" + + - name: "Get the job id using return id from template." + ome_template_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + template_id: "{{ result.return_id }}" + register: facts_result + + - name: "Get job details using job id from template task." 
+ ome_job_info: + hostname: "{{ hostname }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + job_id: "{{ facts_result.template_info[hostname].TaskId }}" + register: job_result + failed_when: job_result.job_info.LastRunStatus.Name == 'Failed' + changed_when: job_result.job_info.LastRunStatus.Name == 'Completed' + until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user.yml new file mode 100644 index 00000000..b1589cae --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user.yml @@ -0,0 +1,70 @@ +--- +- hosts: ome + connection: local + name: Dell OpenManage Ansible User service. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: create new user. + ome_user: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + attributes: + UserName: "user1" + Password: "UserPassword" + RoleId: "10" + Enabled: True + + - name: create user with all parameters + ome_user: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + attributes: + UserName: "user2" + Description: "user2 description" + Password: "UserPassword" + RoleId: "10" + Enabled: True + DirectoryServiceId: 0 + UserTypeId: 1 + Locked: False + Name: "user2" + + - name: modify existing user + ome_user: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + attributes: + UserName: "user3" + RoleId: "10" + Enabled: True + Description: "Modify user Description" + + - name: delete existing user using id. 
+ ome_user: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + user_id: 61874 + + - name: delete existing user using name. + ome_user: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + name: "name"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user_info.yml new file mode 100644 index 00000000..6016d502 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user_info.yml @@ -0,0 +1,33 @@ +--- +- hosts: ome + connection: local + name: Fetching ome user facts. + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Retrieve basic details of all accounts. + ome_user_info: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + + - name: Retrieve details of a specific account identified by its account ID. + ome_user_info: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + account_id: "{{account_id}}" + + - name: Retrieve details of a specific user using filter with UserName. + ome_user_info: + hostname: "{{hostname}}" + username: "{{username}}" + password: "{{password}}" + ca_path: "/path/to/ca_cert.pem" + system_query_options: + filter: "UserName eq 'test'"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/redfish/firmware/redfish_firmware.yml b/ansible_collections/dellemc/openmanage/playbooks/redfish/firmware/redfish_firmware.yml new file mode 100644 index 00000000..15fa188d --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/redfish/firmware/redfish_firmware.yml @@ -0,0 +1,32 @@ +--- +- hosts: redfish_hosts + connection: local + gather_facts: false + name: "Ansible Module for Simple Firmware Update" + + collections: + - dellemc.openmanage + + tasks: + + - name: "Update the firmware from a single executable file available in a local path" + redfish_firmware: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + image_uri: "/home/firmware_repo/component.exe" + + tags: + - local-update + + - name: "Update the firmware from a single executable file available in a HTTP protocol" + redfish_firmware: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + image_uri: "http://192.168.0.1/firmware_repo/component.exe" + + tags: + - http-update diff --git a/ansible_collections/dellemc/openmanage/playbooks/redfish/firmware/redfish_firmware_from_http_jobtracking.yml b/ansible_collections/dellemc/openmanage/playbooks/redfish/firmware/redfish_firmware_from_http_jobtracking.yml new file mode 100644 index 00000000..105f4189 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/redfish/firmware/redfish_firmware_from_http_jobtracking.yml @@ -0,0 +1,92 @@ +--- +- hosts: redfish_hosts + connection: local + gather_facts: false + name: "Ansible Module for Simple Firmware Update" + vars: + retries_count: 100 + polling_interval: 5 + reboot_uri: "/redfish/v1/Systems/System.Embedded.1/Actions/ComputerSystem.Reset" + + collections: + - dellemc.openmanage + + tasks: + + - name: "Update the firmware from a single executable file available in a 
HTTP protocol" + redfish_firmware: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + image_uri: "http://192.168.0.1/firmware_repo/component.exe" + register: result + + - name: "Update the firmware from a single executable with job tracking till completion" + uri: + url: "https://{{ baseuri }}{{ result.task.uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200, 202 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: job_result + until: job_result.json.TaskState == 'Completed' or job_result.json.TaskState == 'Pending' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" + + - name: "Update the firmware from a single executable reboot." + uri: + url: "https://{{ baseuri }}{{ reboot_uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "POST" + body_format: raw + body: '{"ResetType": "ForceRestart"}' + use_proxy: yes + status_code: 204 + return_content: no + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: reboot_result + changed_when: reboot_result.status == 204 + when: job_result.json.TaskState == 'Pending' and job_result.json.Messages.0.Message == 'Task successfully scheduled.' + + - name: "Update the firmware from a single executable Waits for 4 minutes." + wait_for: + timeout: 240 + when: job_result.json.TaskState == 'Pending' and job_result.json.Messages.0.Message == 'Task successfully scheduled.' + + - name: "Update the firmware from a single executable with job tracking till completion." 
+ uri: + url: "https://{{ baseuri }}{{ result.task.uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200, 202 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: final_result + until: final_result.json.TaskState == 'Completed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" + + - name: "Update the firmware from a single executable fact." + set_fact: + job_details: "{{ final_result.json }}" + failed_when: final_result.json.TaskState == "Completed" and final_result.json.TaskStatus != "OK" + changed_when: final_result.json.TaskState == "Completed" and final_result.json.TaskStatus == "OK"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/redfish/firmware/redfish_firmware_from_local_jobtracking.yml b/ansible_collections/dellemc/openmanage/playbooks/redfish/firmware/redfish_firmware_from_local_jobtracking.yml new file mode 100644 index 00000000..8ea91cc3 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/redfish/firmware/redfish_firmware_from_local_jobtracking.yml @@ -0,0 +1,92 @@ +--- +- hosts: redfish_hosts + connection: local + gather_facts: false + name: "Ansible Module for Simple Firmware Update" + vars: + retries_count: 100 + polling_interval: 5 + reboot_uri: "/redfish/v1/Systems/System.Embedded.1/Actions/ComputerSystem.Reset" + + collections: + - dellemc.openmanage + + tasks: + + - name: "Update the firmware from a single executable file available in a local path" + redfish_firmware: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + image_uri: "/home/firmware_repo/component.exe" + register: result + + - name: "Update the firmware from a single executable with job tracking till completion." + uri: + url: "https://{{ baseuri }}{{ result.task.uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200, 202 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: job_result + until: job_result.json.TaskState == 'Completed' or job_result.json.TaskState == 'Pending' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" + + - name: "Update the firmware from a single executable reboot." 
+ uri: + url: "https://{{ baseuri }}{{ reboot_uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "POST" + body_format: raw + body: '{"ResetType": "ForceRestart"}' + use_proxy: yes + status_code: 204 + return_content: no + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: reboot_result + changed_when: reboot_result.status == 204 + when: job_result.json.TaskState == 'Pending' and job_result.json.Messages.0.Message == 'Task successfully scheduled.' + + - name: "Update the firmware from a single executable Waits for 4 minutes." + wait_for: + timeout: 240 + when: job_result.json.TaskState == 'Pending' and job_result.json.Messages.0.Message == 'Task successfully scheduled.' + + - name: "Update the firmware from a single executable with job tracking till completion." + uri: + url: "https://{{ baseuri }}{{ result.task.uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200, 202 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: final_result + until: final_result.json.TaskState == 'Completed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" + + - name: "Update the firmware from a single executable fact." + set_fact: + job_details: "{{ final_result.json }}" + failed_when: final_result.json.TaskState == "Completed" and final_result.json.TaskStatus != "OK" + changed_when: final_result.json.TaskState == "Completed" and final_result.json.TaskStatus == "OK"
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/redfish/redfish_event_subscription.yml b/ansible_collections/dellemc/openmanage/playbooks/redfish/redfish_event_subscription.yml new file mode 100644 index 00000000..7fa5e40c --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/redfish/redfish_event_subscription.yml @@ -0,0 +1,46 @@ +--- +- hosts: redfish + connection: local + name: Configure Redfish subscriptions + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Add Redfish metric subscription + redfish_event_subscription: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + destination: "https://192.168.1.100:8188" + event_type: MetricReport + event_format_type: MetricReport + state: present + + tags: add_metric_subscription + + - name: Add Redfish alert subscription + redfish_event_subscription: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + destination: "https://server01.example.com:8188" + event_type: Alert + event_format_type: Event + state: present + + tags: add_alert_subscription + + - name: Delete Redfish subscription with a specified destination + redfish_event_subscription: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + destination: "https://server01.example.com:8188" + state: absent + + tags: delete_subscription
\ No newline at end of file diff --git a/ansible_collections/dellemc/openmanage/playbooks/redfish/redfish_powerstate.yml b/ansible_collections/dellemc/openmanage/playbooks/redfish/redfish_powerstate.yml new file mode 100644 index 00000000..bacce0cc --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/redfish/redfish_powerstate.yml @@ -0,0 +1,26 @@ +--- +- hosts: redfish + connection: local + name: Configure Server Power Setting + gather_facts: False + + collections: + - dellemc.openmanage + + tasks: + - name: Manage power state of the first device. + redfish_powerstate: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + reset_type: "On" + + - name: Manage power state of a specified device. + redfish_powerstate: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + reset_type: "ForceOff" + resource_id: "System.Embedded.1" diff --git a/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume.yml b/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume.yml new file mode 100644 index 00000000..0c1380a0 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume.yml @@ -0,0 +1,85 @@ +--- +- hosts: redfish_hosts + connection: local + gather_facts: false + name: "Redfish Storage Volume - Ansible Module" + vars: + retries_count: 15 + polling_interval: 5 + + collections: + - dellemc.openmanage + + tasks: + + - name: Create a volume with supported options. 
+ redfish_storage_volume: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + volume_type: "Mirrored" + name: "VD0" + controller_id: "RAID.Slot.1-1" + drives: + - Disk.Bay.5:Enclosure.Internal.0-1:RAID.Slot.1-1 + - Disk.Bay.6:Enclosure.Internal.0-1:RAID.Slot.1-1 + block_size_bytes: 512 + capacity_bytes: 299439751168 + optimum_io_size_bytes: 65536 + encryption_types: NativeDriveEncryption + encrypted: true + register: result + tags: + - create_volume1 + + - name: Create a volume with minimum options. + redfish_storage_volume: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + controller_id: "RAID.Slot.1-1" + volume_type: "NonRedundant" + drives: + - Disk.Bay.1:Enclosure.Internal.0-1:RAID.Slot.1-1 + tags: + - create_volume2 + + - name: Modify a volume's encryption type settings. + redfish_storage_volume: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + volume_id: "Disk.Virtual.5:RAID.Slot.1-1" + encryption_types: "ControllerAssisted" + encrypted: true + tags: + - modify_volume + + - name: Initialize an existing volume. + redfish_storage_volume: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "initialize" + volume_id: "Disk.Virtual.5:RAID.Slot.1-1" + initialize_type: "Slow" + tags: + - initialize_volume + + - name: Delete an existing volume. 
+ redfish_storage_volume: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + volume_id: "Disk.Virtual.5:RAID.Slot.1-1" + tags: + - delete_volume diff --git a/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume_create_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume_create_job_tracking.yml new file mode 100644 index 00000000..fcf596cd --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume_create_job_tracking.yml @@ -0,0 +1,93 @@ +--- +- hosts: redfish_hosts + connection: local + gather_facts: false + name: "Redfish Storage Volume - Ansible Module" + vars: + retries_count: 100 + polling_interval: 10 + reboot_uri: "/redfish/v1/Systems/System.Embedded.1/Actions/ComputerSystem.Reset" + + collections: + - dellemc.openmanage + + tasks: + - name: "Create a storage volume" + redfish_storage_volume: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + name: "VD_BOSS" + controller_id: "AHCI.Slot.6-1" + drives: + - Disk.Direct.1-1:AHCI.Slot.6-1 + - Disk.Direct.0-0:AHCI.Slot.6-1 + optimum_io_size_bytes: 65536 + volume_type: Mirrored + register: result + tags: + - create_volume + + - name: "View the job details to track the status of the create storage volume task" + uri: + url: "https://{{ baseuri }}{{ result.task.uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200, 202 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: job_result + failed_when: "'json' not in job_result" + until: job_result.json.TaskState == 'Completed' or job_result.json.TaskState == 'Pending' + retries: "{{ retries_count }}" + 
delay: "{{ polling_interval }}" + tags: + - job-tracking + + - name: "Reboot the system if the job status is pending." + uri: + url: "https://{{ baseuri }}{{ reboot_uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "POST" + body_format: raw + body: '{"ResetType": "ForceRestart"}' + use_proxy: yes + status_code: 204 + return_content: no + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: reboot_result + changed_when: reboot_result.status == 204 + when: job_result.json.TaskState == 'Pending' and job_result.json.Messages.0.Message == 'Task successfully scheduled.' + + - name: "View the job details to verify if the task status is completed." + uri: + url: "https://{{ baseuri }}{{ result.task.uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200, 202 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: final_result + failed_when: final_result.json.TaskState == "Completed" and final_result.json.TaskStatus != "OK" + until: final_result.json.TaskState == 'Completed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" diff --git a/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume_delete_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume_delete_job_tracking.yml new file mode 100644 index 00000000..34a821d7 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume_delete_job_tracking.yml @@ -0,0 +1,87 @@ +--- +- hosts: redfish_hosts + connection: local + gather_facts: false + name: "Redfish Storage Volume - Ansible Module" + vars: + retries_count: 100 + polling_interval: 10 + reboot_uri: "/redfish/v1/Systems/System.Embedded.1/Actions/ComputerSystem.Reset" + + 
collections: + - dellemc.openmanage + + tasks: + - name: "Delete an existing volume." + redfish_storage_volume: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: "absent" + volume_id: "Disk.Virtual.1:RAID.Slot.1-1" + register: result + tags: + - delete_volume + + - name: "View the job details to track the status of the delete storage volume task" + uri: + url: "https://{{ baseuri }}{{ result.task.uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200, 202 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: job_result + failed_when: "'json' not in job_result" + until: job_result.json.TaskState == 'Completed' or job_result.json.TaskState == 'Pending' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" + tags: + - job-tracking + + - name: "Reboot the system if the job status is pending." + uri: + url: "https://{{ baseuri }}{{ reboot_uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "POST" + body_format: raw + body: '{"ResetType": "ForceRestart"}' + use_proxy: yes + status_code: 204 + return_content: no + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: reboot_result + changed_when: reboot_result.status == 204 + when: job_result.json.TaskState == 'Pending' and job_result.json.Messages.0.Message == 'Task successfully scheduled.' + + - name: "View the job details to verify if the task status is completed." 
+ uri: + url: "https://{{ baseuri }}{{ result.task.uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200, 202 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: final_result + failed_when: final_result.json.TaskState == "Completed" and final_result.json.TaskStatus != "OK" + until: final_result.json.TaskState == 'Completed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" diff --git a/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume_initialize_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume_initialize_job_tracking.yml new file mode 100644 index 00000000..fb79a288 --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume_initialize_job_tracking.yml @@ -0,0 +1,88 @@ +--- +- hosts: redfish_hosts + connection: local + gather_facts: false + name: "Redfish Storage Volume - Ansible Module" + vars: + retries_count: 100 + polling_interval: 10 + reboot_uri: "/redfish/v1/Systems/System.Embedded.1/Actions/ComputerSystem.Reset" + + collections: + - dellemc.openmanage + + tasks: + - name: "Initialize an existing volume." 
+ redfish_storage_volume: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + command: "initialize" + volume_id: "Disk.Virtual.1:RAID.Slot.1-1" + initialize_type: "Slow" + register: result + tags: + - initialize_volume + + - name: "View the job details to track the status of the initialization task" + uri: + url: "https://{{ baseuri }}{{ result.task.uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200, 202 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: job_result + failed_when: "'json' not in job_result" + until: job_result.json.TaskState == 'Completed' or job_result.json.TaskState == 'Pending' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" + tags: + - job-tracking + + - name: "Reboot the system if the job status is pending." + uri: + url: "https://{{ baseuri }}{{ reboot_uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "POST" + body_format: raw + body: '{"ResetType": "ForceRestart"}' + use_proxy: yes + status_code: 204 + return_content: no + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: reboot_result + changed_when: reboot_result.status == 204 + when: job_result.json.TaskState == 'Pending' and job_result.json.Messages.0.Message == 'Task successfully scheduled.' + + - name: "View the job details to verify if the task status is completed." 
+ uri: + url: "https://{{ baseuri }}{{ result.task.uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200, 202 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: final_result + failed_when: final_result.json.TaskState == "Completed" and final_result.json.TaskStatus != "OK" + until: final_result.json.TaskState == 'Completed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" diff --git a/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume_modify_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume_modify_job_tracking.yml new file mode 100644 index 00000000..02bbc19d --- /dev/null +++ b/ansible_collections/dellemc/openmanage/playbooks/redfish/storage/redfish_storage_volume_modify_job_tracking.yml @@ -0,0 +1,89 @@ +--- +- hosts: redfish_hosts + connection: local + gather_facts: false + name: "Redfish Storage Volume - Ansible Module" + vars: + retries_count: 100 + polling_interval: 10 + reboot_uri: "/redfish/v1/Systems/System.Embedded.1/Actions/ComputerSystem.Reset" + + collections: + - dellemc.openmanage + + tasks: + - name: "Modify storage volume encryption settings." 
+ redfish_storage_volume: + baseuri: "{{ baseuri }}" + username: "{{ username }}" + password: "{{ password }}" + ca_path: "/path/to/ca_cert.pem" + state: "present" + volume_id: "Disk.Virtual.1:RAID.Slot.1-1" + encryption_types: "ControllerAssisted" + encrypted: true + register: result + tags: + - modify_volume + + - name: "View the job details to track the status of the modify storage volume encryption task" + uri: + url: "https://{{ baseuri }}{{ result.task.uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200, 202 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: job_result + failed_when: "'json' not in job_result" + until: job_result.json.TaskState == 'Completed' or job_result.json.TaskState == 'Pending' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" + tags: + - job-tracking + + - name: "Reboot the system if the job status is pending." + uri: + url: "https://{{ baseuri }}{{ reboot_uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "POST" + body_format: raw + body: '{"ResetType": "ForceRestart"}' + use_proxy: yes + status_code: 204 + return_content: no + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: reboot_result + changed_when: reboot_result.status == 204 + when: job_result.json.TaskState == 'Pending' and job_result.json.Messages.0.Message == 'Task successfully scheduled.' + + - name: "View the job details to verify if the task status is completed." 
+ uri: + url: "https://{{ baseuri }}{{ result.task.uri }}" + user: "{{ username }}" + password: "{{ password }}" + method: "GET" + use_proxy: yes + status_code: 200, 202 + return_content: yes + validate_certs: no + force_basic_auth: yes + headers: + Content-Type: "application/json" + Accept: "application/json" + register: final_result + failed_when: final_result.json.TaskState == "Completed" and final_result.json.TaskStatus != "OK" + until: final_result.json.TaskState == 'Completed' + retries: "{{ retries_count }}" + delay: "{{ polling_interval }}" |