Diffstat (limited to 'ansible_collections/dellemc/openmanage/playbooks/ome')
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_smtp.yml | 37
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_syslog.yml | 40
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_certificate.yml | 53
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_console_preferences.yml | 97
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address.yml | 115
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address_with_job_tracking.yml | 65
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_proxy.yml | 44
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_settings.yml | 73
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time.yml | 33
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time_zone_info.yml | 31
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver.yml | 40
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver_port_changed_tracking.yml | 61
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_security_settings.yml | 57
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline.yml | 119
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml | 52
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_info.yml | 35
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_baseline.yml | 26
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_devices.yml | 28
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline.yml | 75
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info.yml | 51
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info_filters.yml | 63
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_info.yml | 26
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/firmware/catalog/ome_firmware_catalog.yml | 121
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware.yml | 142
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware_with_job_tracking.yml | 111
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_active_directory.yml | 72
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_chassis_slots.yml | 65
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_group.yml | 167
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_info.yml | 79
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_local_access_configuration.yml | 68
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_location.yml | 52
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_mgmt_network.yml | 105
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_network_services.yml | 59
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_power_settings.yml | 54
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_quick_deploy.yml | 66
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_devices.yml | 60
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_diagnostics.yml | 72
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_discovery.yml | 189
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_domain_user_groups.yml | 59
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_group_device_action.yml | 69
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_groups.yml | 57
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_identity_pool.yml | 134
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_job_info.yml | 35
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout.yml | 32
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout_job_traking.yml | 37
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan.yml | 62
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan_info.yml | 32
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_info.yml | 33
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_workflow.yml | 125
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profiles.yml | 57
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric.yml | 47
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric_uplink.yml | 119
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/ome_template_identity_pool.yml | 31
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate.yml | 51
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate_with_job_tracking.yml | 36
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile.yml | 212
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_assign_job_tracking.yml | 47
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_migrate_job_tracking.yml | 48
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_unassign_job_tracking.yml | 47
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template.yml | 338
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_create_modify_lcd_display.yml | 129
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info.yml | 33
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info_with_filter.yml | 27
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_lcd_display_string_deploy.yml | 46
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_network_vlan.yml | 66
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_with_job_tracking.yml | 48
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user.yml | 70
-rw-r--r--  ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user_info.yml | 33
68 files changed, 4863 insertions, 0 deletions
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_smtp.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_smtp.yml
new file mode 100644
index 00000000..f77eabdd
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_smtp.yml
@@ -0,0 +1,37 @@
+---
+- hosts: ome
+ connection: local
+ name: Configure the SMTP settings of OME and OME-M.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Update SMTP destination server configuration with authentication
+ ome_application_alerts_smtp:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ destination_address: "localhost"
+ port_number: 25
+ use_ssl: true
+ enable_authentication: true
+ credentials:
+ username: "username"
+ password: "password"
+ tags:
+ - smtp_auth
+ - name: Update SMTP destination server configuration without authentication
+ ome_application_alerts_smtp:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ destination_address: "localhost"
+ port_number: 25
+ use_ssl: false
+ enable_authentication: false
+ tags:
+ - smtp_no_auth
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_syslog.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_syslog.yml
new file mode 100644
index 00000000..9fce647e
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_alerts_syslog.yml
@@ -0,0 +1,40 @@
+---
+- hosts: ome
+ connection: local
+ name: Configure syslog forwarding settings on OpenManage Enterprise and OpenManage Enterprise Modular
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Configure single server to forward syslog
+ ome_application_alerts_syslog:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ syslog_servers:
+ - id: 1
+ enabled: true
+ destination_address: 192.168.0.2
+ port_number: 514
+
+ - name: Configure multiple servers to forward syslog
+ ome_application_alerts_syslog:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ syslog_servers:
+ - id: 1
+ port_number: 523
+ - id: 2
+ enabled: true
+ destination_address: sysloghost1.lab.com
+ - id: 3
+ enabled: false
+ - id: 4
+ enabled: true
+ destination_address: 192.168.0.4
+ port_number: 514
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_certificate.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_certificate.yml
new file mode 100644
index 00000000..ab0fb9eb
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_certificate.yml
@@ -0,0 +1,53 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OME Application Certificate Signing Request.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: generate certificate signing request.
+ ome_application_certificate:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "generate_csr"
+ distinguished_name: "hostname.com"
+ department_name: "Remote Access Group"
+ business_name: "Dell Inc."
+ locality: "Round Rock"
+ country_state: "Texas"
+ country: "US"
+ email: "support@dell.com"
+ register: result
+ tags:
+ - generate
+
+ - name: copy CSR data into a file.
+ ansible.builtin.copy:
+ content: "{{ result.csr_status.CertificateData }}"
+ dest: "csr_data.txt"
+ tags:
+ - csr-data
+
+ - name: upload the certificate.
+ ome_application_certificate:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "upload"
+ upload_file: "/path/certificate.cer"
+ tags:
+ - upload
+
+ - name: "once certificate uploaded, OME cannot be accessed for few seconds, hence wait for 10 seconds."
+ wait_for:
+ host: "{{ hostname }}"
+ port: "{{ port }}"
+ delay: 10
+ tags:
+ - upload
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_console_preferences.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_console_preferences.yml
new file mode 100644
index 00000000..b0b29ae9
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_console_preferences.yml
@@ -0,0 +1,97 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OME Application Console Preferences.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Update Console preferences with all the settings.
+ ome_application_console_preferences:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ report_row_limit: 123
+ device_health:
+ health_check_interval: 1
+ health_check_interval_unit: "Hourly"
+ health_and_power_state_on_connection_lost: "last_known"
+ discovery_settings:
+ general_device_naming: "DNS"
+ server_device_naming: "IDRAC_HOSTNAME"
+ invalid_device_hostname: "localhost"
+ common_mac_addresses: "::"
+ server_initiated_discovery:
+ device_discovery_approval_policy: "Automatic"
+ set_trap_destination: True
+ mx7000_onboarding_preferences: "all"
+ builtin_appliance_share:
+ share_options: "CIFS"
+ cifs_options: "V1"
+ email_sender_settings: "admin@dell.com"
+ trap_forwarding_format: "Original"
+ metrics_collection_settings: 31
+ tags:
+ - all_settings
+
+ - name: Update Console preferences with report and device health settings.
+ ome_application_console_preferences:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ report_row_limit: 236
+ device_health:
+ health_check_interval: 10
+ health_check_interval_unit: "Hourly"
+ health_and_power_state_on_connection_lost: "last_known"
+ tags:
+ - valid_report_device
+
+ - name: Update Console preferences with invalid device health settings.
+ ome_application_console_preferences:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_health:
+ health_check_interval: 65
+ health_check_interval_unit: "Minutes"
+ tags:
+ - invalid_device
+
+ - name: Update Console preferences with discovery and built in appliance share settings.
+ ome_application_console_preferences:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ discovery_settings:
+ general_device_naming: "DNS"
+ server_device_naming: "IDRAC_SYSTEM_HOSTNAME"
+ invalid_device_hostname: "localhost"
+ common_mac_addresses: "00:53:45:00:00:00"
+ builtin_appliance_share:
+ share_options: "CIFS"
+ cifs_options: "V1"
+ tags:
+ - valid_discovery
+
+ - name: Update Console preferences with server initiated discovery, mx7000 onboarding preferences, email sender, trap forwarding format, and metrics collection settings.
+ ome_application_console_preferences:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ server_initiated_discovery:
+ device_discovery_approval_policy: "Automatic"
+ set_trap_destination: True
+ mx7000_onboarding_preferences: "chassis"
+ email_sender_settings: "admin@dell.com"
+ trap_forwarding_format: "Normalized"
+ metrics_collection_settings: 361
+ tags:
+ - valid_metrics
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address.yml
new file mode 100644
index 00000000..3eff08bc
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address.yml
@@ -0,0 +1,115 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OME Application network settings.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: IPv4 network settings
+ ome_application_network_address:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ ipv4_configuration:
+ enable: true
+ enable_dhcp: false
+ static_ip_address: 192.168.0.2
+ static_subnet_mask: 255.255.254.0
+ static_gateway: 192.168.0.3
+ use_dhcp_for_dns_server_names: false
+ static_preferred_dns_server: 192.168.0.4
+ static_alternate_dns_server: ""
+ reboot_delay: 5
+ tags:
+ - ipv4_config
+
+ - name: IPv6 network settings
+ ome_application_network_address:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ ipv6_configuration:
+ enable: true
+ enable_auto_configuration: true
+ static_ip_address: 2626:f2f2:f081:9:1c1c:f1f1:4747:1
+ static_prefix_length: 10
+ static_gateway: 2626:f2f2:f081:9:1c1c:f1f1:4747:2
+ use_dhcp_for_dns_server_names: true
+ static_preferred_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:3
+ static_alternate_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:4
+ reboot_delay: 10
+ tags:
+ - ipv6_config
+
+ - name: Management VLAN settings for primary interface
+ ome_application_network_address:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ management_vlan:
+ enable_vlan: true
+ vlan_id: 3344
+ dns_configuration:
+ register_with_dns: false
+ reboot_delay: 1
+ tags:
+ - mgmt_vlan
+
+ - name: DNS settings
+ ome_application_network_address:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ ipv4_configuration:
+ enable: true
+ use_dhcp_for_dns_server_names: false
+ static_preferred_dns_server: 192.168.0.4
+ static_alternate_dns_server: 192.168.0.5
+ dns_configuration:
+ register_with_dns: true
+ use_dhcp_for_dns_domain_name: false
+ dns_name: "MX-SVCTAG"
+ dns_domain_name: "localdomainname"
+ reboot_delay: 1
+ tags:
+ - dns_config
+
+ - name: Complete network settings
+ ome_application_network_address:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ ipv4_configuration:
+ enable: true
+ enable_dhcp: false
+ static_ip_address: 192.168.0.2
+ static_subnet_mask: 255.255.254.0
+ static_gateway: 192.168.0.3
+ use_dhcp_for_dns_server_names: false
+ static_preferred_dns_server: 192.168.0.4
+ static_alternate_dns_server: 192.168.0.5
+ ipv6_configuration:
+ enable: true
+ enable_auto_configuration: true
+ static_ip_address: 2626:f2f2:f081:9:1c1c:f1f1:4747:1
+ static_prefix_length: 10
+ static_gateway: 2626:f2f2:f081:9:1c1c:f1f1:4747:2
+ use_dhcp_for_dns_server_names: true
+ static_preferred_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:3
+ static_alternate_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:4
+ dns_configuration:
+ register_with_dns: true
+ use_dhcp_for_dns_domain_name: false
+ dns_name: "MX-SVCTAG"
+ dns_domain_name: "localdomainname"
+ reboot_delay: 1
+ tags:
+ - all_network_config
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address_with_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address_with_job_tracking.yml
new file mode 100644
index 00000000..1f4cf709
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_address_with_job_tracking.yml
@@ -0,0 +1,65 @@
+---
+- hosts: ome
+ vars:
+ retries_count: 50
+ polling_interval: 5 # in seconds
+ connection: local
+ name: OME - Complete network settings with job details tracking
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Complete network settings
+ ome_application_network_address:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ ipv4_configuration:
+ enable: true
+ enable_dhcp: false
+ static_ip_address: 192.168.0.2
+ static_subnet_mask: 255.255.254.0
+ static_gateway: 192.168.0.3
+ use_dhcp_for_dns_server_names: false
+ static_preferred_dns_server: 192.168.0.4
+ static_alternate_dns_server: 192.168.0.5
+ ipv6_configuration:
+ enable: true
+ enable_auto_configuration: true
+ static_ip_address: 2626:f2f2:f081:9:1c1c:f1f1:4747:1
+ static_prefix_length: 10
+ static_gateway: 2626:f2f2:f081:9:1c1c:f1f1:4747:2
+ use_dhcp_for_dns_server_names: true
+ static_preferred_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:3
+ static_alternate_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:4
+ dns_configuration:
+ register_with_dns: true
+ use_dhcp_for_dns_domain_name: false
+ dns_name: "MX-SVCTAG"
+ dns_domain_name: "localdomainname"
+ reboot_delay: 1
+ register: facts_result
+
+ # To end play when no job_info
+ - name: "End the play when no job_info"
+ meta: end_play
+ when:
+ - facts_result.changed == false
+ - "'job_info' not in facts_result"
+
+ - name: "Get job details using job id from network address config task."
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ job_id: "{{ facts_result.job_info.Id }}"
+ register: job_result
+ failed_when: job_result.job_info.LastRunStatus.Name == 'Failed'
+ changed_when: job_result.job_info.LastRunStatus.Name == 'Completed'
+ until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed'
+ retries: "{{ retries_count }}"
+ delay: "{{ polling_interval }}"
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_proxy.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_proxy.yml
new file mode 100644
index 00000000..0c0e8abf
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_proxy.yml
@@ -0,0 +1,44 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible Application network proxy setting.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Update proxy configuration and enable authentication.
+ ome_application_network_proxy:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ enable_proxy: true
+ ip_address: "192.168.0.2"
+ proxy_port: 444
+ enable_authentication: true
+ proxy_username: "root"
+ proxy_password: "proxy_password"
+ tags: setting1
+
+ - name: Reset proxy authentication.
+ ome_application_network_proxy:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ enable_proxy: true
+ ip_address: "192.168.0.2"
+ proxy_port: 444
+ enable_authentication: false
+ tags: setting2
+
+ - name: Reset proxy configuration.
+ ome_application_network_proxy:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ enable_proxy: false
+ tags: setting3
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_settings.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_settings.yml
new file mode 100644
index 00000000..68340ba9
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_settings.yml
@@ -0,0 +1,73 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible Application network setting.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Configure universal inactivity timeout
+ ome_application_network_settings:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ session_inactivity_timeout:
+ enable_universal_timeout: true
+ universal_timeout: 30
+ api_sessions: 90
+ gui_sessions: 5
+ ssh_sessions: 2
+ serial_sessions: 1
+ tags:
+ - enable_universal_timeout
+ - name: Configure API and GUI timeout and sessions
+ ome_application_network_settings:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ session_inactivity_timeout:
+ api_timeout: 20
+ api_sessions: 100
+ gui_timeout: 25
+ gui_sessions: 5
+ tags:
+ - enable_api_gui_timout_sessions
+ - name: Configure timeout and sessions for all parameters
+ ome_application_network_settings:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ session_inactivity_timeout:
+ api_timeout: 20
+ api_sessions: 100
+ gui_timeout: 15
+ gui_sessions: 5
+ ssh_timeout: 30
+ ssh_sessions: 2
+ serial_timeout: 35
+ serial_sessions: 1
+ tags:
+ - enable_all_timeout_sessions
+ - name: Disable universal timeout and configure timeout and sessions for other parameters
+ ome_application_network_settings:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ session_inactivity_timeout:
+ enable_universal_timeout: false
+ api_timeout: 20
+ api_sessions: 100
+ gui_timeout: 15
+ gui_sessions: 5
+ ssh_timeout: 30
+ ssh_sessions: 2
+ serial_timeout: 35
+ serial_sessions: 1
+ tags:
+ - disa_all_timeout_sessions
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time.yml
new file mode 100644
index 00000000..7dd4edad
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time.yml
@@ -0,0 +1,33 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible Application network time setting.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Configure system time.
+ ome_application_network_time:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ enable_ntp: false
+ system_time: "2020-03-31 21:35:18"
+ time_zone: "TZ_ID_11"
+ tags: time_setting1
+
+ - name: Configure NTP server for time synchronization.
+ ome_application_network_time:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ time_zone: "TZ_ID_66"
+ enable_ntp: true
+ primary_ntp_address: "192.168.0.2"
+ secondary_ntp_address1: "192.168.0.3"
+ secondary_ntp_address2: "192.168.0.4"
+ tags: time_setting2
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time_zone_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time_zone_info.yml
new file mode 100644
index 00000000..a57e0b90
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_time_zone_info.yml
@@ -0,0 +1,31 @@
+---
+- hosts: ome
+ connection: local
+ gather_facts: false
+ name: "Ome application network time zone informaion - Ansible Module"
+ vars:
+ time_zone_uri: "/api/ApplicationService/Network/TimeZones"
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: "Get list of all available times zones along with information specific to each time zone."
+ uri:
+ url: "https://{{ baseuri }}{{ time_zone_uri }}"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ method: "GET"
+ use_proxy: yes
+ status_code: 200
+ validate_certs: no
+ force_basic_auth: yes
+ register: time_zone_result
+ failed_when: "'value' not in time_zone_result.json"
+
+ - name: Get specific time zone ID using time zone name
+ with_items:
+ - "{{ time_zone_result.json.value }}"
+ debug:
+ msg: "{{item['Id']}}"
+ when: item['Name']=='(GMT+05:30) Sri Jayawardenepura'
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver.yml
new file mode 100644
index 00000000..e445ed84
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver.yml
@@ -0,0 +1,40 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OME Application network webserver settings.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Update webserver port and session time out configuration.
+ ome_application_network_webserver:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ webserver_port: 443
+ webserver_timeout: 10
+ tags:
+ - port_timeout_update
+
+ - name: Update session time out
+ ome_application_network_webserver:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ webserver_timeout: 30
+ tags:
+ - timeout_update
+
+ - name: Update web server port.
+ ome_application_network_webserver:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ webserver_port: 8443
+ tags:
+ - port_update
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver_port_changed_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver_port_changed_tracking.yml
new file mode 100644
index 00000000..28911b80
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_network_webserver_port_changed_tracking.yml
@@ -0,0 +1,61 @@
+---
+- hosts: ome
+ connection: local
+ name: "Dell OME Application network webserver port change and track web
+ server till the service restarts."
+ gather_facts: False
+ vars:
+ # 5 minutes wait max
+ retries_count: 30
+ polling_interval: 10
+ webserver_uri: "/api/ApplicationService/Network/WebServerConfiguration"
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ # Update web server configuration
+ - name: Update webserver port and timeout of OME
+ ome_application_network_webserver:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ port: "{{ ome_webserver_port }}"
+ webserver_port: "{{ new_port }}"
+ webserver_timeout: 21
+ register: result
+
+ # To end play when no port change or failure
+ - name: "End the play when no port change"
+ meta: end_play
+ when:
+ - result.changed == false
+ - "'webserver_configuration' not in result"
+
+ # Loop till OME webserver is active by using the new port and webserver config GET call
+ - name: "Pause play until webserver URL is reachable from this host with new port"
+ uri:
+ url: "https://{{ hostname }}:{{ result.webserver_configuration.PortNumber
+ }}{{ webserver_uri }}"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ method: "GET"
+ use_proxy: yes
+ return_content: yes
+ validate_certs: no
+ force_basic_auth: yes
+ headers:
+ Content-Type: "application/json"
+ Accept: "application/json"
+ register: webport_result
+ until: "'PortNumber' in webport_result or webport_result.status == 200"
+ retries: "{{ retries_count }}"
+ delay: "{{ polling_interval }}"
+
+ # Output the webserver_configuration values to be used further
+ - name: "Output the webserver config"
+ vars:
+ webserver_configuration: "{{ webport_result.json }}"
+ debug:
+ var: webserver_configuration
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_security_settings.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_security_settings.yml
new file mode 100644
index 00000000..6a259e96
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/application/ome_application_security_settings.yml
@@ -0,0 +1,57 @@
+---
+- hosts: ome
+ connection: local
+ name: Configure login security settings
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Configure restricted allowed IP range
+ ome_application_security_settings:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ restrict_allowed_ip_range:
+ enable_ip_range: true
+ ip_range: 192.1.2.3/24
+
+ - name: Configure login lockout policy
+ ome_application_security_settings:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ login_lockout_policy:
+ by_user_name: true
+ by_ip_address: true
+ lockout_fail_count: 3
+ lockout_fail_window: 30
+ lockout_penalty_time: 900
+
+ - name: Configure restricted allowed IP range and login lockout policy with a job wait timeout of 60 seconds
+ ome_application_security_settings:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ restrict_allowed_ip_range:
+ enable_ip_range: true
+ ip_range: 192.1.2.3/24
+ login_lockout_policy:
+ by_user_name: true
+ by_ip_address: true
+ lockout_fail_count: 3
+ lockout_fail_window: 30
+ lockout_penalty_time: 900
+ job_wait_timeout: 60
+
+ - name: Enable FIPS mode
+ ome_application_security_settings:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ fips_mode_enable: yes
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline.yml
new file mode 100644
index 00000000..1d5f2375
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline.yml
@@ -0,0 +1,119 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell EMC OpenManage Ansible configuration compliance baseline.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Create a configuration compliance baseline using device IDs
+ ome_configuration_compliance_baseline:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: create
+ template_name: "template 1"
+ description: "description of baseline"
+ names: "baseline1"
+ device_ids:
+ - 1111
+ - 2222
+ tags:
+ - create_compliance_baseline_device_id
+
+ - name: Create a configuration compliance baseline using device service tags
+ ome_configuration_compliance_baseline:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: create
+ template_name: "template 1"
+ names: "baseline1"
+ description: "description of baseline"
+ device_service_tags:
+ - "SVCTAG1"
+ - "SVCTAG2"
+ tags:
+ - create_compliance_baseline_tags
+
+ - name: Create a configuration compliance baseline using group names
+ ome_configuration_compliance_baseline:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: create
+ template_name: "template 1"
+ job_wait_timeout: 1000
+ names: "baseline1"
+ description: "description of baseline"
+ device_group_names:
+ - "Group1"
+ - "Group2"
+ tags:
+ - create_compliance_baseline_group_id
+
+ - name: Delete the configuration compliance baselines
+ ome_configuration_compliance_baseline:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: delete
+ names:
+ - baseline1
+ - baseline2
+ tags:
+ - delete_compliance_baseline
+
+ - name: Modify a configuration compliance baseline using group names
+ ome_configuration_compliance_baseline:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ names: "baseline1"
+ new_name: "baseline_update"
+ template_name: "template2"
+ description: "new description of baseline"
+ job_wait_timeout: 1000
+ device_group_names:
+ - Group1
+
+ - name: Remediate specific non-compliant devices to a configuration compliance baseline using device IDs
+ ome_configuration_compliance_baseline:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "remediate"
+ names: "baseline1"
+ device_ids:
+ - 1111
+
+ - name: Remediate specific non-compliant devices to a configuration compliance baseline using device service tags
+ ome_configuration_compliance_baseline:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "remediate"
+ names: "baseline1"
+ job_wait_timeout: 2000
+ device_service_tags:
+ - "SVCTAG1"
+ - "SVCTAG2"
+
+ - name: Remediate all the non-compliant devices to a configuration compliance baseline
+ ome_configuration_compliance_baseline:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "remediate"
+ job_wait_timeout: 2000
+ names: "baseline1" \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml
new file mode 100644
index 00000000..076ce84d
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml
@@ -0,0 +1,52 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell EMC OpenManage Ansible configuration compliance baseline workflow.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+
+ - name: Create a configuration compliance baseline using group names
+ ome_configuration_compliance_baseline:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: create
+ template_name: "template 1"
+ job_wait_timeout: 1000
+ names: "baseline1"
+ description: "description of baseline"
+ device_group_names:
+ - "Group1"
+ - "Group2"
+
+ - name: Retrieve the compliance report of all of the devices in the specified configuration compliance baseline.
+ ome_configuration_compliance_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline: "baseline1"
+ register: compliance_report
+
+ # This task returns a list of device IDs.
+ # To filter by service tag instead, use the ServiceTag attribute here and
+ # replace the device_ids attribute in the next task with device_service_tags.
+ - name: Filter the non-compliant devices based on the retrieved compliance report.
+ ansible.builtin.set_fact:
+ non_compliance_devices: "{{ compliance_report.compliance_info | json_query(\"value[?ComplianceStatus=='NONCOMPLIANT']\") | map(attribute='Id') | list }}"
+
+ - name: Remediate specified non-compliant devices to a configuration compliance baseline using device IDs
+ ome_configuration_compliance_baseline:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "remediate"
+ names: "baseline1"
+ device_ids: "{{ non_compliance_devices }}"
+ when: "non_compliance_devices|length>0"
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_info.yml
new file mode 100644
index 00000000..a2455703
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/compliance/ome_configuration_compliance_info.yml
@@ -0,0 +1,35 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible Module for Device compliance information
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Retrieve the compliance report of all of the devices in the specified configuration compliance baseline.
+ ome_configuration_compliance_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline: baseline_name
+
+ - name: Retrieve the compliance report for a specific device associated with the baseline using the device ID.
+ ome_configuration_compliance_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline: baseline_name
+ device_id: 10001
+
+ - name: Retrieve the compliance report for a specific device associated with the baseline using the device service tag.
+ ome_configuration_compliance_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline: baseline_name
+ device_service_tag: 2HFGH3
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_baseline.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_baseline.yml
new file mode 100644
index 00000000..48259af6
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_baseline.yml
@@ -0,0 +1,26 @@
+---
+- hosts: ome
+ connection: local
+ gather_facts: false
+ name: "OME - Ansible Modules"
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+
+ - name: "Retrieve baseline information for specific baseline."
+ ome_firmware_baseline_compliance_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: "baseline_name"
+ register: result
+
+ - name: "Filter out device compliance reports."
+ loop: "{{ result.baseline_compliance_info }}"
+ debug:
+ msg: "{{item.ComponentComplianceReports}}"
+ loop_control:
+ label: "{{ item.DeviceId }}" \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_devices.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_devices.yml
new file mode 100644
index 00000000..77d4eddf
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_devices.yml
@@ -0,0 +1,28 @@
+---
+- hosts: ome
+ connection: local
+ gather_facts: false
+ name: "OME - Ansible Modules"
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+
+ - name: "Retrieve baseline information for specified devices."
+ ome_firmware_baseline_compliance_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_ids:
+ - 11111
+ - 22222
+ register: result
+
+ - name: "Filter out device compliance reports."
+ debug:
+ msg: "{{ item.DeviceComplianceReports.0.ComponentComplianceReports }}"
+ loop: "{{ result.baseline_compliance_info }}"
+ loop_control:
+ label: "{{ item.Name }}" \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline.yml
new file mode 100644
index 00000000..35f0eb23
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline.yml
@@ -0,0 +1,75 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible OME firmware baseline operations.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Create baseline for device IDs
+ ome_firmware_baseline:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: "baseline_name"
+ baseline_description: "baseline_description"
+ catalog_name: "catalog_name"
+ device_ids:
+ - 1010
+ - 2020
+
+ - name: Create baseline for service tags
+ ome_firmware_baseline:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: "baseline_name"
+ baseline_description: "baseline_description"
+ catalog_name: "catalog_name"
+ device_service_tags:
+ - "SVCTAG1"
+ - "SVCTAG2"
+
+ - name: Create baseline for device groups without job tracking
+ ome_firmware_baseline:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: "baseline_name"
+ baseline_description: "baseline_description"
+ catalog_name: "catalog_name"
+ device_group_names:
+ - "Group1"
+ - "Group2"
+ job_wait: no
+
+ - name: Modify an existing baseline
+ ome_firmware_baseline:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: "existing_baseline_name"
+ new_baseline_name: "new_baseline_name"
+ baseline_description: "new baseline_description"
+ catalog_name: "catalog_other"
+ device_group_names:
+ - "Group3"
+ - "Group4"
+ - "Group5"
+ downgrade_enabled: no
+ is_64_bit: yes
+
+ - name: Delete a baseline
+ ome_firmware_baseline:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ baseline_name: "baseline_name" \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info.yml
new file mode 100644
index 00000000..cb42e174
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info.yml
@@ -0,0 +1,51 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible firmware baseline compliance details.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Retrieves device-based compliance report for specified device IDs.
+ ome_firmware_baseline_compliance_info:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_ids:
+ - 11111
+ - 22222
+ tags: device_ids
+
+ - name: Retrieves device-based compliance report for specified service tags.
+ ome_firmware_baseline_compliance_info:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tags:
+ - MXL1234
+ - MXL4567
+ tags: device_service_tags
+
+ - name: Retrieves device-based compliance report for specified group names.
+ ome_firmware_baseline_compliance_info:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_group_names:
+ - group1
+ - group2
+ tags: device_group_names
+
+ - name: Retrieves device compliance report for a specified baseline.
+ ome_firmware_baseline_compliance_info:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: "baseline_name"
+ tags: baseline_device
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info_filters.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info_filters.yml
new file mode 100644
index 00000000..bbbf5f0d
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info_filters.yml
@@ -0,0 +1,63 @@
+---
+- hosts: ome
+ connection: local
+ gather_facts: false
+ name: "OME - Ansible Modules"
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+
+ - name: "Retrieve baseline information for specific device ids."
+ ome_firmware_baseline_compliance_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_ids:
+ - 11111
+ - 11112
+ register: result
+
+ tags:
+ - overall-compliance-report
+
+ - name: "Firmware baseline compliance info based on FirmwareStatus - Non-Compliant"
+ set_fact:
+ non_compliance_fact: "{{ item }}"
+ when:
+ - item.DeviceComplianceReports.0.FirmwareStatus=='Non-Compliant'
+ with_items:
+ - "{{ result.baseline_compliance_info }}"
+ loop_control:
+ label: "{{ item.Name }} - {{ item.DeviceComplianceReports.0.FirmwareStatus }}"
+
+ tags:
+ - non-compliance-report
+
+ - name: "Firmware baseline compliance info based on Device ID"
+ set_fact:
+ device_fact: "{{ item }}"
+ when:
+ - item.DeviceComplianceReports.0.DeviceId==11111
+ with_items:
+ - "{{ result.baseline_compliance_info }}"
+ loop_control:
+ label: "{{ item.Name }} - {{ item.DeviceComplianceReports.0.DeviceId }}"
+
+ tags:
+ - device-id-report
+
+ - name: "Firmware baseline compliance info based on Device Service Tag"
+ set_fact:
+ service_tag_fact: "{{ item }}"
+ when:
+ - item.DeviceComplianceReports.0.ServiceTag=='1X1X1'
+ with_items:
+ - "{{ result.baseline_compliance_info }}"
+ loop_control:
+ label: "{{ item.Name }} - {{ item.DeviceComplianceReports.0.ServiceTag }}"
+
+ tags:
+ - device-service-tag-report
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_info.yml
new file mode 100644
index 00000000..7993db51
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/baseline/ome_firmware_baseline_info.yml
@@ -0,0 +1,26 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible firmware baseline details.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Retrieve details of all the available firmware baselines.
+ ome_firmware_baseline_info:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ tags: firmware_baselines
+
+ - name: Retrieve details of a specific firmware baseline identified by its baseline name.
+ ome_firmware_baseline_info:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: "baseline_name"
+ tags: firmware_baseline
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/catalog/ome_firmware_catalog.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/catalog/ome_firmware_catalog.yml
new file mode 100644
index 00000000..a065a3c0
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/catalog/ome_firmware_catalog.yml
@@ -0,0 +1,121 @@
+---
+- hosts: ome
+ connection: local
+ name: "OME - Create Catalog using Repository."
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Create a catalog from HTTPS repository
+ ome_firmware_catalog:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ catalog_name: catalog1
+ catalog_description: catalog description
+ source: downloads.company.com
+ repository_type: HTTPS
+ source_path: "catalog"
+ file_name: "catalog.gz"
+ check_certificate: True
+
+ - name: Create a catalog from HTTP repository
+ ome_firmware_catalog:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ catalog_name: "{{ catalog_name }}"
+ catalog_description: catalog description
+ source: downloads.company.com
+ repository_type: HTTP
+ source_path: "catalog"
+ file_name: "catalog.gz"
+
+ - name: Create a catalog using CIFS share
+ ome_firmware_catalog:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ catalog_name: "{{ catalog_name }}"
+ catalog_description: catalog description
+ source: "192.166.0.1"
+ repository_type: CIFS
+ source_path: "cifs/R940"
+ file_name: "catalog.gz"
+ repository_username: "{{ repository_username }}"
+ repository_password: "{{ repository_password }}"
+ repository_domain: "{{ repository_domain }}"
+
+ - name: Create a catalog using NFS share
+ ome_firmware_catalog:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ catalog_name: "{{ catalog_name }}"
+ catalog_description: catalog description
+ source: "192.166.0.2"
+ repository_type: NFS
+ source_path: "/nfs/R940"
+ file_name: "catalog.xml"
+
+ - name: Create a catalog using repository from Dell.com
+ ome_firmware_catalog:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ catalog_name: "catalog_name"
+ catalog_description: "catalog_description"
+ repository_type: "DELL_ONLINE"
+ check_certificate: True
+
+ - name: Modify a catalog using a repository from CIFS share
+ ome_firmware_catalog:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ catalog_name: "{{ catalog_name }}"
+ catalog_description: new catalog description
+ source: "192.166.0.2"
+ repository_type: CIFS
+ source_path: "cifs/R941"
+ file_name: "catalog1.gz"
+ repository_username: "{{ repository_username }}"
+ repository_password: "{{ repository_password }}"
+ repository_domain: "{{ repository_domain }}"
+
+ - name: Modify a catalog using a repository from Dell.com
+ ome_firmware_catalog:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ catalog_id: 10
+ repository_type: DELL_ONLINE
+ new_catalog_name: "new_catalog_name"
+ catalog_description: "new_catalog_description"
+
+ - name: Delete catalog using catalog name
+ ome_firmware_catalog:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ catalog_name: ["catalog_name1", "catalog_name2"]
+
+ - name: Delete catalog using catalog id
+ ome_firmware_catalog:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ catalog_id: [11, 34]
\ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware.yml
new file mode 100644
index 00000000..198e2cce
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware.yml
@@ -0,0 +1,142 @@
+---
+- hosts: ome
+ connection: local
+ name: "OME - Update Firmware"
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Update firmware from a DUP file using device IDs
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id:
+ - 11111
+ - 22222
+ dup_file: "/path/Chassis-System-Management_Firmware_6N9WN_WN64_1.00.01_A00.EXE"
+
+ - name: Update firmware from a DUP file using device service tags
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag:
+ - KLBR111
+ - KLBR222
+ dup_file: "/path/Network_Firmware_NTRW0_WN64_14.07.07_A00-00_01.EXE"
+
+ - name: Update firmware from a DUP file using device group names
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_group_names:
+ - servers
+ dup_file: "/path/BIOS_87V69_WN64_2.4.7.EXE"
+
+ - name: Update firmware using baseline name
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: baseline_devices
+
+ - name: Stage firmware for the next reboot using baseline name
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: baseline_devices
+ schedule: StageForNextReboot
+
+ - name: Update firmware using baseline name and components
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: baseline_devices
+ components:
+ - BIOS
+
+ - name: Update firmware of device components from a DUP file using device IDs in a baseline
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: baseline_devices
+ device_id:
+ - 11111
+ - 22222
+ components:
+ - iDRAC with Lifecycle Controller
+
+ - name: Update firmware of device components using device service tags under a baseline
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: baseline_devices
+ device_service_tag:
+ - KLBR111
+ - KLBR222
+ components:
+ - IOM-SAS
+
+ - name: Update firmware using baseline name with a device id and required components
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: baseline_devices
+ devices:
+ - id: 12345
+ components:
+ - Lifecycle Controller
+ - id: 12346
+ components:
+ - Enterprise UEFI Diagnostics
+ - BIOS
+
+ - name: Update firmware using baseline name with a device service tag and required components
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: baseline_devices
+ devices:
+ - service_tag: ABCDE12
+ components:
+ - PERC H740P Adapter
+ - BIOS
+ - service_tag: GHIJK34
+ components:
+ - OS Drivers Pack
+
+ - name: Update firmware using baseline name with a device service tag or device id and required components
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: baseline_devices
+ devices:
+ - service_tag: ABCDE12
+ components:
+ - BOSS-S1 Adapter
+ - PowerEdge Server BIOS
+ - id: 12345
+ components:
+ - iDRAC with Lifecycle Controller
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware_with_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware_with_job_tracking.yml
new file mode 100644
index 00000000..c104f3f5
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/firmware/ome_firmware_with_job_tracking.yml
@@ -0,0 +1,111 @@
+---
+- hosts: ome
+ connection: local
+ name: "OME - Update Firmware"
+ gather_facts: False
+ vars:
+ retries_count: 100
+ polling_interval: 10
+ all_firmware_task_tags:
+ - device-ids
+ - service-tags
+ - group-name
+ - baseline-name
+ - baseline-name-dup
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: "Update firmware from a DUP file using a device ids."
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id:
+ - 11111
+ - 22222
+ dup_file: "/path/Chassis-System-Management_Firmware_6N9WN_WN64_1.00.01_A00.EXE"
+ register: result
+ tags:
+ - device-ids
+
+ - name: "Update firmware from a DUP file using a device service tags."
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag:
+ - KLBR111
+ - KLBR222
+ dup_file: "/path/Network_Firmware_NTRW0_WN64_14.07.07_A00-00_01.EXE"
+ register: result
+ tags:
+ - service-tags
+
+ - name: "Update firmware from a DUP file using a device group names."
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_group_names:
+ - servers
+ dup_file: "/path/BIOS_87V69_WN64_2.4.7.EXE"
+ register: result
+ tags:
+ - group-name
+
+ - name: "Update firmware using baseline name."
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: baseline_devices
+ register: result
+ tags:
+ - baseline-name
+
+ - name: "Update firmware from a DUP file using a baseline names."
+ ome_firmware:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ baseline_name: "baseline_devices, baseline_groups"
+ dup_file: "/path/BIOS_87V69_WN64_2.4.7.EXE"
+ tags:
+ - baseline-name-dup
+
+ - name: "Track job details for the ome firmware update operation using a job id."
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ job_id: "{{ result.update_status.Id }}"
+ register: job_result
+ until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed' or job_result.job_info.LastRunStatus.Name == 'Warning'
+ retries: "{{ retries_count }}"
+ delay: "{{ polling_interval }}"
+ tags: "{{ all_firmware_task_tags }}"
+
+ - name: "Set job fact details if the task status is warning."
+ set_fact:
+ ome_firmware_job_fact: "{{ job_result | combine(job_msg, recursive=true) }}"
+ failed_when: job_result.job_info.LastRunStatus.Name == 'Warning'
+ vars:
+ job_msg: {'msg': 'Completed with {{ job_result.job_info.LastRunStatus.Name|lower}}'}
+ when: job_result.job_info.LastRunStatus.Name == 'Warning'
+ tags: "{{ all_firmware_task_tags }}"
+
+ - name: "Set job fact details if the task status is completed or failed."
+ set_fact:
+ ome_firmware_job_fact: "{{ job_result }}"
+ failed_when: job_result.job_info.LastRunStatus.Name == 'Failed'
+ changed_when: job_result.job_info.LastRunStatus.Name == 'Completed'
+ when: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed'
+ tags: "{{ all_firmware_task_tags }}" \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_active_directory.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_active_directory.yml
new file mode 100644
index 00000000..16011809
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_active_directory.yml
@@ -0,0 +1,72 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible Active Directory service configuration.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Add Active Directory service using DNS lookup along with the test connection
+ ome_active_directory:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ name: my_ad1
+ domain_server:
+ - domainname.com
+ group_domain: domainname.com
+ test_connection: yes
+ domain_username: user@domainname
+ domain_password: domain_password
+
+ - name: Add Active Directory service using IP address of the domain controller with certificate validation
+ ome_active_directory:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ name: my_ad2
+ domain_controller_lookup: MANUAL
+ domain_server:
+ - 192.68.20.181
+ group_domain: domainname.com
+ validate_certificate: yes
+ certificate_file: "/path/to/certificate/file.cer"
+
+ - name: Modify domain controller IP address, network_timeout and group_domain
+ ome_active_directory:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ name: my_ad2
+ domain_controller_lookup: MANUAL
+ domain_server:
+ - 192.68.20.189
+ group_domain: newdomain.in
+ network_timeout: 150
+
+ - name: Delete Active Directory service
+ ome_active_directory:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ name: my_ad2
+ state: absent
+
+ - name: Test connection to existing Active Directory service with certificate validation
+ ome_active_directory:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ name: my_ad2
+ test_connection: yes
+ domain_username: user@domainname
+ domain_password: domain_password
+ validate_certificate: yes
+ certificate_file: "/path/to/certificate/file.cer"
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_chassis_slots.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_chassis_slots.yml
new file mode 100644
index 00000000..0099fc80
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_chassis_slots.yml
@@ -0,0 +1,65 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible slot name configuration.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Rename the slots in multiple chassis using slot number and chassis service tag.
+ ome_chassis_slots:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ slot_options:
+ - chassis_service_tag: ABC1234
+ slots:
+ - slot_number: 1
+ slot_name: sled_name_1
+ - slot_number: 2
+ slot_name: sled_name_2
+ - chassis_service_tag: ABC1235
+ slots:
+ - slot_number: 1
+ slot_name: sled_name_1
+ - slot_number: 2
+ slot_name: sled_name_2
+
+ - name: Rename single slot name of the sled using sled ID
+ ome_chassis_slots:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_options:
+ - device_id: 10054
+ slot_name: slot_device_name_1
+
+ - name: Rename single slot name of the sled using sled service tag
+ ome_chassis_slots:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_options:
+ - device_service_tag: ABC1234
+ slot_name: service_tag_slot
+
+ - name: Rename multiple slot names of the devices
+ ome_chassis_slots:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_options:
+ - device_id: 10054
+ slot_name: sled_name_1
+ - device_service_tag: ABC1234
+ slot_name: sled_name_2
+ - device_id: 10055
+ slot_name: sled_name_3
+ - device_service_tag: PQR1234
+ slot_name: sled_name_4
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_group.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_group.yml
new file mode 100644
index 00000000..d7af342a
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_group.yml
@@ -0,0 +1,167 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible device group operations.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Add devices to a static device group by using the group name and device IDs
+ ome_device_group:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ name: "Storage Services"
+ device_ids:
+ - 11111
+ - 11112
+ tags: device-id
+
+ - name: Add devices to a static device group by using the group name and device service tags
+ ome_device_group:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ name: "Storage Services"
+ device_service_tags:
+ - GHRT2R
+ - KJHDF3
+ tags: device-service-tags
+
+ - name: Add devices to a static device group by using the group ID and device service tags
+ ome_device_group:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ group_id: 12345
+ device_service_tags:
+ - GHRT2R
+ - KJHDF3
+ tags: group_id_device-service-tags
+
+ - name: Add devices to a static device group by using the group name and IPv4 addresses
+ ome_device_group:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ name: "Storage Services"
+ ip_addresses:
+ - 192.35.0.1
+ - 192.35.0.5
+ tags: group_name_ipv4
+
+ - name: Add devices to a static device group by using the group ID and IPv6 addresses
+ ome_device_group:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ group_id: 12345
+ ip_addresses:
+ - fe80::ffff:ffff:ffff:ffff
+ - fe80::ffff:ffff:ffff:2222
+ tags: group_id_ipv6
+
+ - name: Add devices to a static device group by using the group ID and supported IPv4 and IPv6 address formats.
+ ome_device_group:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ group_id: 12345
+ ip_addresses:
+ - 192.35.0.1
+ - 10.36.0.0-192.36.0.255
+ - 192.37.0.0/24
+ - fe80::ffff:ffff:ffff:ffff
+ - ::ffff:192.0.2.0/125
+ - fe80::ffff:ffff:ffff:1111-fe80::ffff:ffff:ffff:ffff
+ tags: group_id_ipv4_ipv6
+
+ - name: Remove devices from a static device group by using the group name and device IDs
+ ome_device_group:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ name: "Storage Services"
+ device_ids:
+ - 11111
+ - 11112
+ tags: device-id
+
+ - name: Remove devices from a static device group by using the group name and device service tags
+ ome_device_group:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ name: "Storage Services"
+ device_service_tags:
+ - GHRT2R
+ - KJHDF3
+ tags: device-service-tags
+
+ - name: Remove devices from a static device group by using the group ID and device service tags
+ ome_device_group:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ group_id: 12345
+ device_service_tags:
+ - GHRT2R
+ - KJHDF3
+ tags: group_id_device-service-tags
+
+ - name: Remove devices from a static device group by using the group name and IPv4 addresses
+ ome_device_group:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ name: "Storage Services"
+ ip_addresses:
+ - 192.35.0.1
+ - 192.35.0.5
+ tags: group_name_ipv4
+
+ - name: Remove devices from a static device group by using the group ID and IPv6 addresses
+ ome_device_group:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ group_id: 12345
+ ip_addresses:
+ - fe80::ffff:ffff:ffff:ffff
+ - fe80::ffff:ffff:ffff:2222
+ tags: group_id_ipv6
+
+ - name: Remove devices from a static device group by using the group ID and supported IPv4 and IPv6 address formats.
+ ome_device_group:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ group_id: 12345
+ ip_addresses:
+ - 192.35.0.1
+ - 10.36.0.0-192.36.0.255
+ - 192.37.0.0/24
+ - fe80::ffff:ffff:ffff:ffff
+ - ::ffff:192.0.2.0/125
+ - fe80::ffff:ffff:ffff:1111-fe80::ffff:ffff:ffff:ffff
+ tags: group_id_ipv4_ipv6
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_info.yml
new file mode 100644
index 00000000..6b307749
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_info.yml
@@ -0,0 +1,79 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible device inventory details.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Retrieve basic inventory of all devices.
+ ome_device_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+
+ - name: Retrieve basic inventory for devices identified by IDs 33333 or 11111 using filtering.
+ ome_device_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ fact_subset: "basic_inventory"
+ system_query_options:
+ filter: "Id eq 33333 or Id eq 11111"
+
+ - name: Retrieve inventory details of specified devices identified by IDs 11111 and 22222.
+ ome_device_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ fact_subset: "detailed_inventory"
+ system_query_options:
+ device_id:
+ - 11111
+ - 22222
+
+ - name: Retrieve inventory details of specified devices identified by service tags MXL1234 and MXL4567.
+ ome_device_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ fact_subset: "detailed_inventory"
+ system_query_options:
+ device_service_tag:
+ - MXL1234
+ - MXL4567
+
+ - name: Retrieve details of a specified inventory type for devices identified by ID and service tags.
+ ome_device_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ fact_subset: "detailed_inventory"
+ system_query_options:
+ device_id:
+ - 11111
+ device_service_tag:
+ - MXL1234
+ - MXL4567
+ inventory_type: "serverDeviceCards"
+
+ - name: Retrieve subsystem health of specified devices identified by service tags.
+ ome_device_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ fact_subset: "subsystem_health"
+ system_query_options:
+ device_service_tag:
+ - MXL1234
+ - MXL4567
+
+
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_local_access_configuration.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_local_access_configuration.yml
new file mode 100644
index 00000000..6f282c8a
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_local_access_configuration.yml
@@ -0,0 +1,68 @@
+---
+- hosts: ome
+ connection: local
+ name: OpenManage Ansible Modules for local access settings.
+ gather_facts: false
+ collections: dellemc.openmanage
+
+ tasks:
+
+ - name: Configure KVM, direct access and power button settings of the chassis using device ID.
+ ome_device_local_access_configuration:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id: 25011
+ enable_kvm_access: true
+ enable_chassis_direct_access: false
+ chassis_power_button:
+ enable_chassis_power_button: false
+ enable_lcd_override_pin: true
+ disabled_button_lcd_override_pin: 123456
+ tags: lac-device-id
+
+ - name: Configure Quick sync and LCD settings of the chassis using device service tag.
+ ome_device_local_access_configuration:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag: GHRT2RL
+ quick_sync:
+ quick_sync_access: READ_ONLY
+ enable_read_authentication: true
+ enable_quick_sync_wifi: true
+ enable_inactivity_timeout: true
+ timeout_limit: 10
+ timeout_limit_unit: MINUTES
+ lcd:
+ lcd_access: VIEW_ONLY
+ lcd_language: en
+ user_defined: "LCD Text"
+ tags: lac-tag
+
+ - name: Configure all local access settings of the host chassis.
+ ome_device_local_access_configuration:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ enable_kvm_access: true
+ enable_chassis_direct_access: false
+ chassis_power_button:
+ enable_chassis_power_button: false
+ enable_lcd_override_pin: true
+ disabled_button_lcd_override_pin: 123456
+ quick_sync:
+ quick_sync_access: READ_WRITE
+ enable_read_authentication: true
+ enable_quick_sync_wifi: true
+ enable_inactivity_timeout: true
+ timeout_limit: 120
+ timeout_limit_unit: SECONDS
+ lcd:
+ lcd_access: VIEW_MODIFY
+ lcd_language: en
+ user_defined: "LCD Text"
+ tags: lac-host
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_location.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_location.yml
new file mode 100644
index 00000000..d2d86050
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_location.yml
@@ -0,0 +1,52 @@
+---
+- hosts: ome
+ connection: local
+ name: OpenManage Ansible Modules for device location settings.
+ gather_facts: false
+ collections: dellemc.openmanage
+
+ tasks:
+
+ - name: Update device location settings of a chassis using the device ID.
+ ome_device_location:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id: 25011
+ data_center: data center 1
+ room: room 1
+ aisle: aisle 1
+ rack: rack 1
+ rack_slot: 2
+ location: location 1
+ tags: location-device-id
+
+ - name: Update device location settings of a chassis using the device service tag.
+ ome_device_location:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag: GHRT2RL
+ data_center: data center 1
+ room: room 1
+ aisle: aisle 1
+ rack: rack 1
+ rack_slot: 2
+ location: location 1
+ tags: location-device-service-tag
+
+ - name: Update device location settings of the host chassis.
+ ome_device_location:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ data_center: data center 1
+ room: room 1
+ aisle: aisle 1
+ rack: rack 1
+ rack_slot: 2
+ location: location 1
+ tags: location-chassis
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_mgmt_network.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_mgmt_network.yml
new file mode 100644
index 00000000..e05a3772
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_mgmt_network.yml
@@ -0,0 +1,105 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OME Modular device network settings.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Network settings for chassis
+ ome_device_mgmt_network:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag: CHAS123
+ delay: 10
+ ipv4_configuration:
+ enable_ipv4: true
+ enable_dhcp: false
+ static_ip_address: 192.168.0.2
+ static_subnet_mask: 255.255.254.0
+ static_gateway: 192.168.0.3
+ use_dhcp_to_obtain_dns_server_address: false
+ static_preferred_dns_server: 192.168.0.4
+ static_alternate_dns_server: 192.168.0.5
+ ipv6_configuration:
+ enable_ipv6: true
+ enable_auto_configuration: false
+ static_ip_address: 2626:f2f2:f081:9:1c1c:f1f1:4747:1
+ static_prefix_length: 10
+ static_gateway: ffff::2607:f2b1:f081:9
+ use_dhcpv6_to_obtain_dns_server_address: false
+ static_preferred_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:3
+ static_alternate_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:4
+ dns_configuration:
+ register_with_dns: true
+ use_dhcp_for_dns_domain_name: false
+ dns_name: MX-SVCTAG
+ dns_domain_name: dnslocaldomain
+ auto_negotiation: no
+ network_speed: 100_MB
+
+ - name: Network settings for server
+ ome_device_mgmt_network:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag: SRVR123
+ ipv4_configuration:
+ enable_ipv4: true
+ enable_dhcp: false
+ static_ip_address: 192.168.0.2
+ static_subnet_mask: 255.255.254.0
+ static_gateway: 192.168.0.3
+ use_dhcp_to_obtain_dns_server_address: false
+ static_preferred_dns_server: 192.168.0.4
+ static_alternate_dns_server: 192.168.0.5
+ ipv6_configuration:
+ enable_ipv6: true
+ enable_auto_configuration: false
+ static_ip_address: 2626:f2f2:f081:9:1c1c:f1f1:4747:1
+ static_prefix_length: 10
+ static_gateway: ffff::2607:f2b1:f081:9
+ use_dhcpv6_to_obtain_dns_server_address: false
+ static_preferred_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:3
+ static_alternate_dns_server: 2626:f2f2:f081:9:1c1c:f1f1:4747:4
+
+ - name: Network settings for I/O module
+ ome_device_mgmt_network:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag: IOM1234
+ ipv4_configuration:
+ enable_ipv4: true
+ enable_dhcp: false
+ static_ip_address: 192.168.0.2
+ static_subnet_mask: 255.255.254.0
+ static_gateway: 192.168.0.3
+ ipv6_configuration:
+ enable_ipv6: true
+ enable_auto_configuration: false
+ static_ip_address: 2626:f2f2:f081:9:1c1c:f1f1:4747:1
+ static_prefix_length: 10
+ static_gateway: ffff::2607:f2b1:f081:9
+ dns_server_settings:
+ preferred_dns_server: 192.168.0.4
+ alternate_dns_server1: 192.168.0.5
+
+ - name: Management VLAN configuration of chassis using device id
+ ome_device_mgmt_network:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id: 12345
+ management_vlan:
+ enable_vlan: true
+ vlan_id: 2345
+ dns_configuration:
+ register_with_dns: false \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_network_services.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_network_services.yml
new file mode 100644
index 00000000..0a47d2dd
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_network_services.yml
@@ -0,0 +1,59 @@
+---
+- hosts: ome
+ connection: local
+ name: OpenManage Ansible Modules for network services settings.
+ gather_facts: false
+ collections: dellemc.openmanage
+
+ tasks:
+
+ - name: Update network services settings of a chassis using the device ID.
+ ome_device_network_services:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id: 25011
+ snmp_settings:
+ enabled: true
+ port_number: 161
+ community_name: public
+ ssh_settings:
+ enabled: false
+ remote_racadm_settings:
+ enabled: false
+ tags: snmp-settings
+
+ - name: Update network services settings of a chassis using the device service tag.
+ ome_device_network_services:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag: GHRT2RL
+ snmp_settings:
+ enabled: false
+ ssh_settings:
+ enabled: true
+ port_number: 22
+ max_sessions: 1
+ max_auth_retries: 3
+ idle_timeout: 1
+ remote_racadm_settings:
+ enabled: false
+ tags: ssh-settings
+
+ - name: Update network services settings of the host chassis.
+ ome_device_network_services:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id: 25012
+ snmp_settings:
+ enabled: false
+ ssh_settings:
+ enabled: false
+ remote_racadm_settings:
+ enabled: true
+ tags: racadm-settings
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_power_settings.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_power_settings.yml
new file mode 100644
index 00000000..4b68a29b
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_power_settings.yml
@@ -0,0 +1,54 @@
+---
+- hosts: ome
+ connection: local
+ name: OpenManage Ansible Modules for power settings.
+ gather_facts: false
+ collections: dellemc.openmanage
+
+ tasks:
+
+ - name: Update power configuration settings of a chassis using the device ID.
+ ome_device_power_settings:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id: 25011
+ power_configuration:
+ enable_power_cap: true
+ power_cap: 3424
+ tags: power-config
+
+ - name: Update redundancy configuration settings of a chassis using the device service tag.
+ ome_device_power_settings:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag: GHRT2RL
+ redundancy_configuration:
+ redundancy_policy: GRID_REDUNDANCY
+ tags: redundancy-config
+
+ - name: Update hot spare configuration settings of a chassis using device ID.
+ ome_device_power_settings:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id: 25012
+ hot_spare_configuration:
+ enable_hot_spare: true
+ primary_grid: GRID_1
+ tags: hotspare-config
+
+ - name: Update power configuration settings of a host chassis.
+ ome_device_power_settings:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ power_configuration:
+ enable_power_cap: true
+ power_cap: 3425
+ tags: power-config-chassis
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_quick_deploy.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_quick_deploy.yml
new file mode 100644
index 00000000..71a07e68
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_device_quick_deploy.yml
@@ -0,0 +1,66 @@
+---
+- hosts: ome
+ connection: local
+ name: OpenManage Ansible Modules for Quick Deploy settings.
+ gather_facts: false
+ collections: dellemc.openmanage
+
+ tasks:
+
+ - name: Configure server Quick Deploy settings of the chassis using device ID.
+ ome_device_quick_deploy:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id: 25011
+ setting_type: ServerQuickDeploy
+ quick_deploy_options:
+ password: "password"
+ ipv4_enabled: True
+ ipv4_network_type: Static
+ ipv4_subnet_mask: 255.255.255.0
+ ipv4_gateway: 192.168.0.1
+ ipv6_enabled: True
+ ipv6_network_type: Static
+ ipv6_prefix_length: 1
+ ipv6_gateway: "::"
+ slots:
+ - slot_id: 1
+ slot_ipv4_address: 192.168.0.2
+ slot_ipv6_address: "::"
+ vlan_id: 1
+ - slot_id: 2
+ slot_ipv4_address: 192.168.0.3
+ slot_ipv6_address: "::"
+ vlan_id: 2
+ tags: server-quick-deploy
+
+ - name: Configure server Quick Deploy settings of the chassis using device service tag.
+ ome_device_quick_deploy:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag: GHRT2RL
+ setting_type: IOMQuickDeploy
+ quick_deploy_options:
+ password: "password"
+ ipv4_enabled: True
+ ipv4_network_type: Static
+ ipv4_subnet_mask: 255.255.255.0
+ ipv4_gateway: 192.168.0.1
+ ipv6_enabled: True
+ ipv6_network_type: Static
+ ipv6_prefix_length: 1
+ ipv6_gateway: "::"
+ slots:
+ - slot_id: 1
+ slot_ipv4_address: 192.168.0.2
+ slot_ipv6_address: "::"
+ vlan_id: 1
+ - slot_id: 2
+ slot_ipv4_address: 192.168.0.3
+ slot_ipv6_address: "::"
+ vlan_id: 2
+ tags: iom-quick-deploy
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_devices.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_devices.yml
new file mode 100644
index 00000000..ba93eb00
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_devices.yml
@@ -0,0 +1,60 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible device operations.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Refresh Inventory
+ ome_devices:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_action: refresh_inventory
+ device_service_tags:
+ - 2HB7NX2
+
+ - name: Clear iDRAC job queue
+ ome_devices:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_action: clear_idrac_job_queue
+ device_service_tags:
+ - 2HB7NX2
+
+ - name: Reset iDRAC using the service tag
+ ome_devices:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_action: reset_idrac
+ device_service_tags:
+ - 2H7HNX2
+
+ - name: Remove devices using service tags
+ ome_devices:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ device_service_tags:
+ - SVCTAG1
+ - SVCTAF2
+
+ - name: Remove devices using IDs
+ ome_devices:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ device_ids:
+ - 10235
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_diagnostics.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_diagnostics.yml
new file mode 100644
index 00000000..b5f0fc97
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_diagnostics.yml
@@ -0,0 +1,72 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell EMC OpenManage Ansible diagnostics operation.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Application log extraction using CIFS share location
+ ome_diagnostics:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ share_type: CIFS
+ share_address: "{{ share_address }}"
+ share_user: "{{ share_username }}"
+ share_password: "{{ share_password }}"
+ share_name: "{{ share_name }}"
+ log_type: application
+ mask_sensitive_info: false
+ test_connection: true
+ tags: app-cifs-log
+
+ - name: Application log extraction using NFS share location
+ ome_diagnostics:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ share_address: "{{ share_address }}"
+ share_type: NFS
+ share_name: "{{ share_name }}"
+ log_type: application
+ mask_sensitive_info: true
+ test_connection: true
+ tags: app-nfs-log
+
+ - name: Support assist log extraction using CIFS share location
+ ome_diagnostics:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ share_address: "{{ share_address }}"
+ share_user: "{{ share_username }}"
+ share_password: "{{ share_password }}"
+ share_name: "{{ share_name }}"
+ share_type: CIFS
+ log_type: support_assist_collection
+ device_ids:
+ - 10011
+ - 10022
+ log_selectors: [OS_LOGS]
+ test_connection: true
+ tags: tsr-cifs-log
+
+ - name: Support assist log extraction using NFS share location
+ ome_diagnostics:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ share_address: "{{ share_address }}"
+ share_type: NFS
+ share_name: "{{ share_name }}"
+ log_type: support_assist_collection
+ device_group_name: group_name
+ test_connection: true
+ tags: tsr-nfs-log
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_discovery.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_discovery.yml
new file mode 100644
index 00000000..1a16e328
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_discovery.yml
@@ -0,0 +1,189 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell EMC OpenManage Ansible discovery operations.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Discover servers in a range
+ ome_discovery:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ discovery_job_name: "Discovery_server_1"
+ discovery_config_targets:
+ - network_address_detail:
+ - 192.96.24.1-192.96.24.255
+ device_types:
+ - SERVER
+ wsman:
+ username: user
+ password: password
+ tags:
+ - server_discovery
+
+ - name: Discover chassis in a range
+ ome_discovery:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ discovery_job_name: "Discovery_chassis_1"
+ discovery_config_targets:
+ - network_address_detail:
+ - 192.96.24.1-192.96.24.255
+ device_types:
+ - CHASSIS
+ wsman:
+ username: user
+ password: password
+ tags:
+ - chassis_discovery
+
+ - name: Discover switches in a range
+ ome_discovery:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ discovery_job_name: "Discover_switch_1"
+ discovery_config_targets:
+ - network_address_detail:
+ - 192.96.24.1-192.96.24.255
+ device_types:
+ - NETWORK SWITCH
+ snmp:
+ community: snmp_creds
+ tags:
+ - switch_discovery
+
+ - name: Discover storage in a range
+ ome_discovery:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ discovery_job_name: "Discover_storage_1"
+ discovery_config_targets:
+ - network_address_detail:
+ - 192.96.24.1-192.96.24.255
+ device_types:
+ - STORAGE
+ storage:
+ username: user
+ password: password
+ snmp:
+ community: community_str
+ tags:
+ - storage_discovery
+
+ - name: Delete a discovery job
+ ome_discovery:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ discovery_job_name: "Discovery-123"
+ tags:
+ - delete_discovery
+
+ - name: Schedule the discovery of multiple devices ignoring partial failure and enable trap to receive alerts
+ ome_discovery:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "present"
+ discovery_job_name: "Discovery-123"
+ discovery_config_targets:
+ - network_address_detail:
+ - 192.96.24.1-192.96.24.255
+ - 192.96.0.0/24
+ - 192.96.26.108
+ device_types:
+ - SERVER
+ - CHASSIS
+ - STORAGE
+ - NETWORK SWITCH
+ wsman:
+ username: wsman_user
+ password: wsman_pwd
+ redfish:
+ username: redfish_user
+ password: redfish_pwd
+ snmp:
+ community: snmp_community
+ - network_address_detail:
+ - 192.96.25.1-192.96.25.255
+ - ipmihost
+ - esxiserver
+ - sshserver
+ device_types:
+ - SERVER
+ ssh:
+ username: ssh_user
+ password: ssh_pwd
+ vmware:
+ username: vm_user
+ password: vmware_pwd
+ ipmi:
+ username: ipmi_user
+ password: ipmi_pwd
+ schedule: RunLater
+ cron: "0 0 9 ? * MON,WED,FRI *"
+ ignore_partial_failure: True
+ trap_destination: True
+ community_string: True
+ email_recipient: test_email@company.com
+ tags:
+ - schedule_discovery
+
+ - name: Discover servers with CA check enabled
+ ome_discovery:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ discovery_job_name: "Discovery_server_ca1"
+ discovery_config_targets:
+ - network_address_detail:
+ - 192.96.24.108
+ device_types:
+ - SERVER
+ wsman:
+ username: user
+ password: password
+ ca_check: True
+ certificate_data: "{{ lookup('ansible.builtin.file', '/path/to/certificate_data_file') }}"
+ tags:
+ - server_ca_check
+
+ - name: Discover chassis with CA check enabled using certificate data
+ ome_discovery:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ discovery_job_name: "Discovery_chassis_ca1"
+ discovery_config_targets:
+ - network_address_detail:
+ - 192.96.24.108
+ device_types:
+ - CHASSIS
+ redfish:
+ username: user
+ password: password
+ ca_check: True
+ certificate_data: "-----BEGIN CERTIFICATE-----\r\n
+ ABCDEFGHIJKLMNOPQRSTUVWXYZaqwertyuiopasdfghjklzxcvbnmasdasagasvv\r\n
+ ABCDEFGHIJKLMNOPQRSTUVWXYZaqwertyuiopasdfghjklzxcvbnmasdasagasvv\r\n
+ ABCDEFGHIJKLMNOPQRSTUVWXYZaqwertyuiopasdfghjklzxcvbnmasdasagasvv\r\n
+ aqwertyuiopasdfghjklzxcvbnmasdasagasvv=\r\n
+ -----END CERTIFICATE-----"
+ tags:
+ - chassis_ca_check_data \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_domain_user_groups.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_domain_user_groups.yml
new file mode 100644
index 00000000..7229f638
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_domain_user_groups.yml
@@ -0,0 +1,59 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell EMC OpenManage Ansible Active Directory user group operations.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+
+ - name: Create Active Directory user groups.
+ ome_domain_user_groups:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ group_name: account operators
+ directory_name: directory_name
+ role: administrator
+ domain_username: username@domain
+ domain_password: domain_password
+ tags: user-group-add
+
+ - name: Create Active Directory user groups with different domain format.
+ ome_domain_user_groups:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ group_name: account operators
+ directory_name: directory_name
+ role: administrator
+ domain_username: domain\\username
+ domain_password: domain_password
+ tags: user-group-add-domain
+
+ - name: Update Active Directory user groups.
+ ome_domain_user_groups:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ group_name: account operators
+ role: chassis administrator
+ tags: user-group-update
+
+ - name: Remove Active Directory user groups.
+ ome_domain_user_groups:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ group_name: "Administrators"
+ tags: user-group-remove
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_group_device_action.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_group_device_action.yml
new file mode 100644
index 00000000..08b03786
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_group_device_action.yml
@@ -0,0 +1,69 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible group device operations.
+ gather_facts: False
+ vars:
+ group_name: Dell iDRAC Servers
+ device_action: refresh_inventory # other options are clear_idrac_job_queue, reset_idrac
+ validate_certs: True
+ ca_path: "/path/to/ca_cert.pem"
+
+ tasks:
+ - name: Retrieve group ID based on group name.
+ ansible.builtin.uri:
+ url: "https://{{ hostname }}/api/GroupService/Groups?Name={{ group_name }}"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ method: "GET"
+ use_proxy: yes
+ status_code: 200
+ return_content: yes
+ validate_certs: "{{ validate_certs }}"
+ ca_path: "{{ ca_path }}"
+ force_basic_auth: yes
+ headers:
+ Content-Type: "application/json"
+ Accept: "application/json"
+ register: group_id
+
+ - name: Assign group ID to a variable.
+ set_fact:
+ group_id_value: "{{ group_id.json.value[0].Id }}"
+
+ - name: Retrieve all devices under the group ID.
+ ansible.builtin.uri:
+ url: "https://{{ hostname }}/api/GroupService/Groups({{ group_id_value }})/AllLeafDevices"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ method: "GET"
+ use_proxy: yes
+ status_code: 200
+ return_content: yes
+ validate_certs: "{{ validate_certs }}"
+ ca_path: "{{ ca_path }}"
+ force_basic_auth: yes
+ headers:
+ Content-Type: "application/json"
+ Accept: "application/json"
+ register: all_devices
+
+ - name: Empty list to store device IDs.
+ set_fact:
+ devices_list: []
+
+ - name: Add devices retrieved from a group to the list.
+ set_fact:
+ devices_list: "{{ devices_list + [item.Id] }}"
+ with_items:
+ - "{{ all_devices.json.value }}"
+
+ - name: Perform device action tasks on devices.
+ dellemc.openmanage.ome_devices:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_certs }}"
+ ca_path: "{{ ca_path }}"
+ device_action: "{{ device_action }}"
+ device_ids: "{{ devices_list }}"
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_groups.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_groups.yml
new file mode 100644
index 00000000..027a53d0
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_groups.yml
@@ -0,0 +1,57 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible Group configuration.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Create a new device group
+ ome_groups:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ name: "group 1"
+ description: "Group 1 description"
+ parent_group_name: "group parent 1"
+ tags:
+ - create_group
+
+ - name: Modify a device group using the group ID
+ ome_groups:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ group_id: 1234
+ description: "Group description updated"
+ parent_group_name: "group parent 2"
+ tags:
+ - modify_group
+
+ - name: Delete a device group using the device group name
+ ome_groups:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ name: "group 1"
+ tags:
+ - delete_name
+
+ - name: Delete multiple device groups using the group IDs
+ ome_groups:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ group_id:
+ - 1234
+ - 5678
+ tags:
+ - delete_ids
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_identity_pool.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_identity_pool.yml
new file mode 100644
index 00000000..b5d960ca
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_identity_pool.yml
@@ -0,0 +1,134 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible identity pool operations.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: "Create an identity pool using ethernet, FCoE, iSCSI and FC settings."
+ ome_identity_pool:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ pool_name: "pool1"
+ pool_description: "Identity pool with Ethernet, FCoE, ISCSI and FC settings"
+ ethernet_settings:
+ starting_mac_address: "50:50:50:50:50:00"
+ identity_count: 60
+ fcoe_settings:
+ starting_mac_address: "aabb.ccdd.7070"
+ identity_count: 75
+ iscsi_settings:
+ starting_mac_address: "60:60:60:60:60:00"
+ identity_count: 30
+ initiator_config:
+ iqn_prefix: "iqn.myprefix."
+ initiator_ip_pool_settings:
+ ip_range: "10.33.0.1-10.33.0.255"
+ subnet_mask: "255.255.255.0"
+ gateway: "192.168.4.1"
+ primary_dns_server: "10.8.8.8"
+ secondary_dns_server: "8.8.8.8"
+ fc_settings:
+ starting_address: "10-10-10-10-10-10"
+ identity_count: 45
+ tags: create1
+
+ - name: "Create an identity pool using only ethernet settings."
+ ome_identity_pool:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ pool_name: "pool2"
+ pool_description: "Identity pool with ethernet"
+ ethernet_settings:
+ starting_mac_address: "aa-bb-cc-dd-ee-aa"
+ identity_count: 80
+ tags: create2
+
+ - name: "Create an identity pool using only iSCSI settings"
+ ome_identity_pool:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ pool_name: "pool3"
+ pool_description: "Identity pool with iscsi"
+ iscsi_settings:
+ starting_mac_address: "10:10:10:10:10:00"
+ identity_count: 30
+ initiator_config:
+ iqn_prefix: "iqn.myprefix."
+ initiator_ip_pool_settings:
+ ip_range: "20.33.0.1-20.33.0.255"
+ subnet_mask: "255.255.255.0"
+ gateway: "192.168.4.1"
+ primary_dns_server: "10.8.8.8"
+ secondary_dns_server: "8.8.8.8"
+ tags: create3
+
+ - name: "Modify an identity pool using FC settings."
+ ome_identity_pool:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ pool_name: "pool2"
+ pool_description: "Identity pool with fc_settings"
+ fc_settings:
+ starting_address: "40:40:40:40:40:22"
+ identity_count: 48
+ tags: modify1
+
+ - name: "Modify an identity pool."
+ ome_identity_pool:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ pool_name: "pool1"
+ new_pool_name: "pool_new"
+ pool_description: "modifying identity pool with ethernet and fcoe settings"
+ ethernet_settings:
+ starting_mac_address: "90-90-90-90-90-90"
+ identity_count: 61
+ fcoe_settings:
+ starting_mac_address: "aabb.ccdd.5050"
+ identity_count: 77
+ tags: modify2
+
+ - name: "Modify an identity pool using iSCSI and FC settings."
+ ome_identity_pool:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ pool_name: "pool_new"
+ new_pool_name: "pool_new2"
+ pool_description: "modifying identity pool with iscsi and fc settings"
+ iscsi_settings:
+ identity_count: 99
+ initiator_config:
+ iqn_prefix: "iqn1.myprefix2."
+ initiator_ip_pool_settings:
+ gateway: "192.168.4.5"
+ fc_settings:
+ starting_address: "10:10:10:10:10:10"
+ identity_count: 98
+ tags: modify3
+
+ - name: "Delete an identity pool"
+ ome_identity_pool:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ pool_name: "pool1"
+ tags: delete
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_job_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_job_info.yml
new file mode 100644
index 00000000..f90892ad
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_job_info.yml
@@ -0,0 +1,35 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible job details.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Get all jobs details.
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+
+ - name: Get job details for id.
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ job_id: 12345
+
+ - name: Get filtered job details.
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ system_query_options:
+ top: 2
+ skip: 1
+ filter: "JobType/Id eq 8" \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout.yml
new file mode 100644
index 00000000..c9a8db75
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout.yml
@@ -0,0 +1,32 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible port breakout configuration.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+
+ - name: Port breakout configuration.
+ ome_network_port_breakout:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ target_port: "2HB7NX2:phy-port1/1/11"
+ breakout_type: "1X40GE"
+ tags:
+ - port-config
+
+ - name: Revoke the default breakout configuration.
+ ome_network_port_breakout:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ target_port: "2HB7NX2:phy-port1/1/11"
+ breakout_type: "HardwareDefault"
+ tags:
+ - port-default
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout_job_traking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout_job_traking.yml
new file mode 100644
index 00000000..b94b6b48
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_port_breakout_job_traking.yml
@@ -0,0 +1,37 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible port breakout configuration.
+ gather_facts: False
+ vars:
+ retries_count: 50
+ polling_interval: 5 # in seconds
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+
+ - name: Port breakout configuration.
+ ome_network_port_breakout:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ target_port: "2HB7NX2:phy-port1/1/11"
+ breakout_type: "1X40GE"
+ register: result
+
+ - name: "Get job details using job id from port breakout configuration task."
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ job_id: "{{ result.breakout_status.Id }}"
+ register: job_result
+ failed_when: job_result.job_info.LastRunStatus.Name == 'Failed'
+ changed_when: job_result.job_info.LastRunStatus.Name == 'Completed'
+ until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed'
+ retries: "{{ retries_count }}"
+ delay: "{{ polling_interval }}"
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan.yml
new file mode 100644
index 00000000..d92ef99f
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan.yml
@@ -0,0 +1,62 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell EMC OpenManage Ansible VLAN operations.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: "Create a VLAN range"
+ ome_network_vlan:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ name: "vlan1"
+ description: "VLAN desc"
+ type: "General Purpose (Bronze)"
+ vlan_minimum: 35
+ vlan_maximum: 40
+ tags: create_vlan_range
+
+ - name: "Create a VLAN with a single value"
+ ome_network_vlan:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ name: "vlan2"
+ description: "VLAN desc"
+ type: "General Purpose (Bronze)"
+ vlan_minimum: 127
+ vlan_maximum: 127
+ tags: create_vlan_single
+
+ - name: "Modify a VLAN"
+ ome_network_vlan:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ name: "vlan1"
+ new_name: "vlan_gold1"
+ description: "new description"
+ type: "General Purpose (Gold)"
+ vlan_minimum: 45
+ vlan_maximum: 50
+ tags: modify_vlan
+
+ - name: "Delete a VLAN"
+ ome_network_vlan:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ name: "vlan1"
+ tags: delete_vlan
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan_info.yml
new file mode 100644
index 00000000..3cf9c3c2
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_network_vlan_info.yml
@@ -0,0 +1,32 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Enterprise Ansible network VLAN details.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Retrieve information about all network VLAN(s) available in the device.
+ ome_network_vlan_info:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+
+ - name: Retrieve information about a network VLAN using the VLAN ID.
+ ome_network_vlan_info:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ id: 12345
+
+ - name: Retrieve information about a network VLAN using the VLAN name.
+ ome_network_vlan_info:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ name: "Network VLAN - 1"
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_info.yml
new file mode 100644
index 00000000..87c124b8
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_info.yml
@@ -0,0 +1,33 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell EMC OpenManage Ansible server interface profile information.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Retrieve the server interface profiles of all the devices using device IDs.
+ ome_server_interface_profile_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id:
+ - 10001
+ - 10002
+ tags:
+ - sip-device-id
+
+ - name: Retrieve the server interface profiles of all the devices using device service tags.
+ ome_server_interface_profile_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag:
+ - 6GHH6H2
+ - 6KHH6H3
+ tags:
+ - sip-service-tag
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_workflow.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_workflow.yml
new file mode 100644
index 00000000..485a1a24
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profile_workflow.yml
@@ -0,0 +1,125 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell EMC OpenManage Ansible server interface profile workflow.
+ gather_facts: False
+ vars:
+ retries_count: 100
+ polling_interval: 10 #in seconds
+ src_service_tag: 7GHH6H1
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+
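+ # Workflow outline: create the smart fabric, build a template from a reference
+ # device, wait for the template job to finish, deploy the template on the target
+ # devices, and then modify and read back the server interface profile.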
+ - name: Create a smart fabric.
+ ome_smart_fabric:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ name: "fabric1"
+ description: "fabric desc"
+ fabric_design: "2xMX9116n_Fabric_Switching_Engines_in_same_chassis"
+ primary_switch_service_tag: "6H7J6Z2"
+ secondary_switch_service_tag: "59HW8X2"
+ override_LLDP_configuration: "Enabled"
+ register: fabric_result
+
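+ # Give the newly created fabric time to settle; the pause below runs only when
+ # the fabric task actually reported a change.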
+ - name: "sleep for 300 seconds and continue with play"
+ wait_for:
+ timeout: 300
+ when: fabric_result.changed == True
+
+ - name: Create a template from a reference device service tag.
+ ome_template:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag: "{{ src_service_tag }}"
+ attributes:
+ Name: "New_Template_2"
+ Description: "New Template description"
+ register: result
+ failed_when: "'return_id' not in result"
+
+ - name: "Get the job id using return id from template."
+ ome_template_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ template_id: "{{ result.return_id }}"
+ register: facts_result
+
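+ # Poll the template creation job until it reaches a terminal state: the task is
+ # retried up to retries_count times with polling_interval seconds between
+ # attempts, and the play fails if the job ends as 'Failed'.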
+ - name: "Get job details using job id from template task."
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ job_id: "{{ facts_result.template_info[hostname].TaskId }}"
+ register: job_result
+ failed_when: job_result.job_info.LastRunStatus.Name == 'Failed'
+ changed_when: job_result.job_info.LastRunStatus.Name == 'Completed'
+ until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed'
+ retries: "{{ retries_count }}"
+ delay: "{{ polling_interval }}"
+
+ - name: Deploy template on multiple devices
+ dellemc.openmanage.ome_template:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "deploy"
+ template_id: "{{ result.return_id }}"
+ device_service_tag:
+ - 6GHH6H1
+ - 6GHH6H2
+ register: deploy_result
+
+ - name: "sleep for 10 seconds and continue with play"
+ wait_for: timeout=10
+
+ - name: "Track the deploy job till completion"
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ job_id: "{{ deploy_result.return_id }}"
+ register: deploy_job_result
+ failed_when: "'job_info' not in deploy_job_result"
+ until: deploy_job_result.job_info.LastRunStatus.Name == 'Completed' or deploy_job_result.job_info.LastRunStatus.Name == 'Failed'
+ retries: "{{ retries_count }}"
+ delay: "{{ polling_interval }}"
+
+ - name: Modify Server Interface Profile for the server using the service tag.
+ ome_server_interface_profiles:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag:
+ - 6GHH6H2
+ nic_teaming: NoTeaming
+ nic_configuration:
+ - nic_identifier: NIC.Mezzanine.1A-1-1
+ team: no
+ untagged_network: 2
+ tagged_networks:
+ names:
+ - vlan
+
+ - name: Retrieve the server interface profile of the device using its service tag.
+ ome_server_interface_profile_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag:
+ - 6GHH6H2
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profiles.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profiles.yml
new file mode 100644
index 00000000..c003b714
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_server_interface_profiles.yml
@@ -0,0 +1,57 @@
+---
+- hosts: omem
+ connection: local
+ name: Dell OpenManage Ansible server interface profiles configuration.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Modify Server Interface Profile for the server using the service tag
+ ome_server_interface_profiles:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag:
+ - SVCTAG1
+ - SVCTAG2
+ nic_teaming: LACP
+ nic_configuration:
+ - nic_identifier: NIC.Mezzanine.1A-1-1
+ team: no
+ untagged_network: 2
+ tagged_networks:
+ names:
+ - vlan1
+ - nic_identifier: NIC.Mezzanine.1A-2-1
+ team: yes
+ untagged_network: 3
+ tagged_networks:
+ names:
+ - range120-125
+
+ - name: Modify Server Interface Profile for the server using the id
+ ome_server_interface_profiles:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id:
+ - 34523
+ - 48999
+ nic_teaming: NoTeaming
+ nic_configuration:
+ - nic_identifier: NIC.Mezzanine.1A-1-1
+ team: no
+ untagged_network: 2
+ tagged_networks:
+ names:
+ - vlan2
+ - nic_identifier: NIC.Mezzanine.1A-2-1
+ team: yes
+ untagged_network: 3
+ tagged_networks:
+ names:
+ - range120-125
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric.yml
new file mode 100644
index 00000000..3813458a
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric.yml
@@ -0,0 +1,47 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell EMC OpenManage Ansible smart fabric operations.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: "Create a smart fabric"
+ ome_smart_fabric:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ name: "fabric1"
+ description: "fabric desc"
+ fabric_design: "2xMX9116n_Fabric_Switching_Engines_in_different_chassis"
+ primary_switch_service_tag: "SVTG123"
+ secondary_switch_service_tag: "PXYT456"
+ override_LLDP_configuration: "Enabled"
+ tags: create_smart_fabric
+
+ - name: "Modify a smart fabric"
+ ome_smart_fabric:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ name: "fabric1"
+ new_name: "fabric_gold1"
+ description: "new description"
+ tags: modify_smart_fabric
+
+
+ - name: "Delete a smart fabric"
+ ome_smart_fabric:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ name: "fabric1"
+ tags: delete_smart_fabric
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric_uplink.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric_uplink.yml
new file mode 100644
index 00000000..88b5cc62
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_smart_fabric_uplink.yml
@@ -0,0 +1,119 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible smart fabric uplink configuration.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: "Create a Uplink"
+ ome_smart_fabric_uplink:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "present"
+ fabric_name: "fabric1"
+ name: "uplink1"
+ description: "CREATED from OMAM"
+ uplink_type: "Ethernet"
+ ufd_enable: "Enabled"
+ primary_switch_service_tag: "ABC1234"
+ primary_switch_ports:
+ - ethernet1/1/13
+ - ethernet1/1/14
+ secondary_switch_service_tag: "XYZ1234"
+ secondary_switch_ports:
+ - ethernet1/1/13
+ - ethernet1/1/14
+ tagged_networks:
+ - vlan1
+ - vlan3
+ untagged_network: vlan2
+ tags: create_uplink
+
+ - name: "modify a existing uplink1"
+ ome_smart_fabric_uplink:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "present"
+ fabric_name: "fabric1"
+ name: "uplink1"
+ new_name: "uplink2"
+ description: "Modified from OMAM"
+ uplink_type: "Ethernet"
+ ufd_enable: "Disabled"
+ primary_switch_service_tag: "DEF1234"
+ primary_switch_ports:
+ - ethernet1/2/13
+ - ethernet1/2/14
+ secondary_switch_service_tag: "TUV1234"
+ secondary_switch_ports:
+ - ethernet1/2/13
+ - ethernet1/2/14
+ tagged_networks:
+ - vlan11
+ - vlan33
+ untagged_network: vlan22
+ tags: modify_uplink
+
+ - name: "Delete a Uplink"
+ ome_smart_fabric_uplink:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ fabric_name: "fabric1"
+ name: "uplink1"
+ tags: delete_uplink
+
+ - name: "Modify the Uplink name"
+ ome_smart_fabric_uplink:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "present"
+ fabric_name: "fabric1"
+ name: "uplink1"
+ new_name: "uplink2"
+ tags: modify_uplink_name
+
+ - name: "Modify a Uplink ports"
+ ome_smart_fabric_uplink:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "present"
+ fabric_name: "fabric1"
+ name: "uplink1"
+ description: "uplink ports modified"
+ primary_switch_service_tag: "ABC1234"
+ primary_switch_ports:
+ - ethernet1/1/6
+ - ethernet1/1/7
+ secondary_switch_service_tag: "XYZ1234"
+ secondary_switch_ports:
+ - ethernet1/1/9
+ - ethernet1/1/10
+ tags: modify_ports
+
+ - name: "Modify Uplink networks"
+ ome_smart_fabric_uplink:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "present"
+ fabric_name: "fabric1"
+ name: "create1"
+ description: "uplink networks modified"
+ tagged_networks:
+ - vlan4
+ tags: modify_networks
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_template_identity_pool.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_template_identity_pool.yml
new file mode 100644
index 00000000..433954aa
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_template_identity_pool.yml
@@ -0,0 +1,31 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible template identity pool attach and detach operation.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+
+ - name: Attach an identity pool to a template.
+ ome_template_identity_pool:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ template_name: template_name
+ identity_pool_name: identity_pool_name
+ tags:
+ - attach
+
+ - name: Detach an identity pool from a template.
+ ome_template_identity_pool:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ template_name: template_name
+ tags:
+ - detach \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate.yml
new file mode 100644
index 00000000..517ff118
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate.yml
@@ -0,0 +1,51 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible - OME Power state operations.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Power state operation based on device id.
+ ome_powerstate:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id: 11111
+ power_state: "off"
+
+ - name: Power state operation based on device service tag.
+ ome_powerstate:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag: "KLBR111"
+ power_state: "on"
+
+ - name: Power state operation based on list of device ids.
+ ome_powerstate:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id: "{{ item.device_id }}"
+ power_state: "{{ item.state }}"
+ with_items:
+ - { "device_id": 11111, "state": "on" }
+ - { "device_id": 22222, "state": "off" }
+
+ - name: Power state operation based on list of device service tags.
+ ome_powerstate:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag: "{{ item.service_tag }}"
+ power_state: "{{ item.state }}"
+ with_items:
+ - { "service_tag": "KLBR111", "state": "on" }
+ - { "service_tag": "KLBR222", "state": "off" } \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate_with_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate_with_job_tracking.yml
new file mode 100644
index 00000000..8393992a
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/powerstate/ome_powerstate_with_job_tracking.yml
@@ -0,0 +1,36 @@
+---
+- hosts: ome
+ vars:
+ retries_count: 5
+ polling_interval: 5 #in seconds
+ connection: local
+ name: "OME - Power state management job tracking."
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: "Power state operation based on device id"
+ ome_powerstate:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ power_state: "off"
+ device_id: 11111
+ register: result
+ failed_when: "'job_status' not in result"
+
+ - name: "Get job details using job id from power state operation."
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ job_id: "{{result.job_status.Id}}"
+ register: job_result
+ failed_when: "'job_info' not in job_result"
+ until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed'
+ retries: "{{ retries_count }}"
+ delay: "{{ polling_interval }}" \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile.yml
new file mode 100644
index 00000000..14d43e6a
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile.yml
@@ -0,0 +1,212 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell EMC OpenManage Ansible profile operations.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Create two profiles from a template
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: create
+ template_name: "template 1"
+ name_prefix: "omam_profile"
+ number_of_profiles: 2
+ tags:
+ - create_profile
+
+ - name: Create profile with NFS share
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: create
+ template_name: "template 1"
+ name_prefix: "omam_profile"
+ number_of_profiles: 1
+ boot_to_network_iso:
+ boot_to_network: True
+ share_type: "NFS"
+ share_ip: "192.168.0.1"
+ iso_path: "/path/to/my_iso.iso"
+ iso_timeout: 8
+ tags:
+ - create_profile_nfs
+
+ - name: Create profile with CIFS share
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: create
+ template_name: "template 1"
+ name_prefix: "omam_profile"
+ number_of_profiles: 1
+ boot_to_network_iso:
+ boot_to_network: True
+ share_type: CIFS
+ share_ip: "192.168.0.2"
+ share_user: "username"
+ share_password: "password"
+ workgroup: "workgroup"
+ iso_path: "\\path\\to\\my_iso.iso"
+ iso_timeout: 8
+ tags:
+ - create_profile_cifs
+
+ - name: Modify profile name with NFS share and attributes
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: modify
+ name: "Profile 00001"
+ new_name: "modified profile"
+ description: "new description"
+ boot_to_network_iso:
+ boot_to_network: True
+ share_type: NFS
+ share_ip: "192.168.0.1"
+ iso_path: "/path/to/my_iso.iso"
+ iso_timeout: 8
+ attributes:
+ Attributes:
+ - Id: 4506
+ Value: "server attr 1"
+ IsIgnored: true
+ - Id: 4507
+ Value: "server attr 2"
+ IsIgnored: true
+ - DisplayName: 'System, Server Topology, ServerTopology 1 Aisle Name'
+ Value: Aisle 5
+ IsIgnored: false
+ tags:
+ - modify_profile
+
+ - name: Delete using profile name
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "delete"
+ name: "Profile 00003"
+ tags:
+ - delete_profile_name
+
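+ # Profiles can also be selected by a filter instead of a name: SelectAll with a
+ # Filters expression in this task, or an explicit list of ProfileIds in the task
+ # that follows.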
+ - name: Delete using filter
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "delete"
+ filters:
+ SelectAll: True
+ Filters: =contains(ProfileName,'Profile 00002')
+ tags:
+ - delete_filter
+
+ - name: Delete using profile list filter
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "delete"
+ filters:
+ ProfileIds:
+ - 17123
+ - 12124
+ tags:
+ - delete_profile_ids
+
+ - name: Assign profile name with network share
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: assign
+ name: "Profile 00001"
+ device_id: 12456
+ boot_to_network_iso:
+ boot_to_network: True
+ share_type: NFS
+ share_ip: "192.168.0.1"
+ iso_path: "/path/to/my_iso.iso"
+ iso_timeout: 8
+ attributes:
+ Attributes:
+ - Id: 4506
+ Value: "server attr 1"
+ IsIgnored: true
+ Options:
+ ShutdownType: 0
+ TimeToWaitBeforeShutdown: 300
+ EndHostPowerState: 1
+ StrictCheckingVlan: True
+ Schedule:
+ RunNow: True
+ RunLater: False
+ tags:
+ - assign_profile
+
+ - name: Unassign using profile name
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "unassign"
+ name: "Profile 00003"
+ tags:
+ - unassign_profile_name
+
+ - name: "Unassign using filters"
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "unassign"
+ filters:
+ SelectAll: True
+ Filters: =contains(ProfileName,'Profile 00003')
+ tags:
+ - unassign_filter
+
+ - name: Unassign using a profile ID list filter
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "unassign"
+ filters:
+ ProfileIds:
+ - 17123
+ - 16123
+ tags:
+ - unassign_profile_list
+
+ - name: Migrate profile
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "migrate"
+ name: "Profile 0001"
+ device_id: 12456
+ tags:
+ - migrate_profile \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_assign_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_assign_job_tracking.yml
new file mode 100644
index 00000000..d4c9c772
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_assign_job_tracking.yml
@@ -0,0 +1,47 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell EMC OpenManage Ansible profile operations.
+ gather_facts: False
+ vars:
+ retries_count: 120
+ polling_interval: 30 # 30 seconds x 120 times = 1 hour
+ failed_states: ['Failed', 'Warning', 'Aborted', 'Paused', 'Stopped',
+ 'Canceled']
+ completed_states: ['Completed', 'Failed', 'Warning', 'Aborted', 'Paused',
+ 'Stopped', 'Canceled']
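+ # completed_states lists every terminal job status so polling stops as soon as
+ # the job finishes for any reason; failed_states marks which of those terminal
+ # states should fail the play.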
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Assign a profile to target
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "assign"
+ name: "Profile 00001"
+ device_id: 12456
+ register: result
+
+ - name: End play when no job_id in result
+ meta: end_play
+ when:
+ - result.changed == false
+ - "'job_id' not in result"
+
+ - name: Get job details using job id
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ job_id: "{{ result.job_id }}"
+ register: job_result
+ failed_when: job_result.job_info.LastRunStatus.Name in failed_states
+ changed_when: job_result.job_info.LastRunStatus.Name == 'Completed'
+ until: job_result.job_info.LastRunStatus.Name in completed_states
+ retries: "{{ retries_count }}"
+ delay: "{{ polling_interval }}"
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_migrate_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_migrate_job_tracking.yml
new file mode 100644
index 00000000..ae7f732b
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_migrate_job_tracking.yml
@@ -0,0 +1,48 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell EMC OpenManage Ansible profile operations.
+ gather_facts: False
+ vars:
+ retries_count: 120
+ polling_interval: 30 # 30 seconds x 120 times = 1 hour
+ failed_states: ['Failed', 'Warning', 'Aborted', 'Paused', 'Stopped',
+ 'Canceled']
+ completed_states: ['Completed', 'Failed', 'Warning', 'Aborted', 'Paused',
+ 'Stopped', 'Canceled']
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+
+ - name: Migrate a profile
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "migrate"
+ name: "Profile 00001"
+ device_id: 12456
+ register: result
+
+ - name: End play when no job_id in result
+ meta: end_play
+ when:
+ - result.changed == false
+ - "'job_id' not in result"
+
+ - name: Get job details using job id
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ job_id: "{{ result.job_id }}"
+ register: job_result
+ failed_when: job_result.job_info.LastRunStatus.Name in failed_states
+ changed_when: job_result.job_info.LastRunStatus.Name == 'Completed'
+ until: job_result.job_info.LastRunStatus.Name in completed_states
+ retries: "{{ retries_count }}"
+ delay: "{{ polling_interval }}"
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_unassign_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_unassign_job_tracking.yml
new file mode 100644
index 00000000..b1a21312
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/profile/ome_profile_unassign_job_tracking.yml
@@ -0,0 +1,47 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell EMC OpenManage Ansible profile operations.
+ gather_facts: False
+ vars:
+ retries_count: 120
+ polling_interval: 30 # 30 seconds x 120 times = 1 hour
+ failed_states: ['Failed', 'Warning', 'Aborted', 'Paused', 'Stopped',
+ 'Canceled']
+ completed_states: ['Completed', 'Failed', 'Warning', 'Aborted', 'Paused',
+ 'Stopped', 'Canceled']
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+
+ - name: Unassign using profile name
+ ome_profile:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "unassign"
+ name: "Profile 00003"
+ register: result
+
+ - name: End play when no job_id in result
+ meta: end_play
+ when:
+ - result.changed == false
+ - "'job_id' not in result"
+
+ - name: Get job details using job id
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ job_id: "{{ result.job_id }}"
+ register: job_result
+ failed_when: job_result.job_info.LastRunStatus.Name in failed_states
+ changed_when: job_result.job_info.LastRunStatus.Name == 'Completed'
+ until: job_result.job_info.LastRunStatus.Name in completed_states
+ retries: "{{ retries_count }}"
+ delay: "{{ polling_interval }}"
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template.yml
new file mode 100644
index 00000000..58ac15ff
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template.yml
@@ -0,0 +1,338 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible device Template service.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: "Create a template from a reference device."
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id: 25123
+ attributes:
+ Name: "New Template"
+ Description: "New Template description"
+
+ - name: "Modify template name, description, and attribute value."
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "modify"
+ template_id: 12
+ attributes:
+ Name: "New Custom Template"
+ Description: "Custom Template Description"
+ # Attributes to be modified in the template.
+ # For information on any attribute id, use API /TemplateService/Templates(Id)/Views(Id)/AttributeViewDetails
+ # This section is optional
+ Attributes:
+ - Id: 1234
+ Value: "Test Attribute"
+ IsIgnored: false
+
+ - name: Modify template name, description, and attribute using detailed view
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "modify"
+ template_id: 12
+ attributes:
+ Name: "New Custom Template"
+ Description: "Custom Template Description"
+ Attributes:
+ # Enter the comma separated string as appearing in the Detailed view on GUI
+ # NIC -> NIC.Integrated.1-1-1 -> NIC Configuration -> Wake On LAN1
+ - DisplayName: 'NIC, NIC.Integrated.1-1-1, NIC Configuration, Wake On LAN'
+ Value: Enabled
+ IsIgnored: false
+ # System -> LCD Configuration -> LCD 1 User Defined String for LCD
+ - DisplayName: 'System, LCD Configuration, LCD 1 User Defined String for LCD'
+ Value: LCD str by OMAM
+ IsIgnored: false
+
+ - name: "Deploy template on multiple devices "
+ ome_template:
+ hostname: "192.168.0.1"
+ username: "username"
+ password: "password"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "deploy"
+ template_id: 12
+ device_id:
+ - 12765
+ - 10173
+ device_service_tag:
+ - 'SVTG123'
+ - 'SVTG456'
+
+ - name: Deploy template on groups
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "deploy"
+ template_id: 12
+ device_group_names:
+ - server_group_1
+ - server_group_2
+
+ - name: "Deploy template on multiple devices along attributes modification for target device"
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "deploy"
+ template_id: 12
+ device_id:
+ - 12765
+ - 10173
+ device_service_tag:
+ - 'SVTG123'
+ attributes:
+ # Device specific attributes to be modified during deployment.
+ # For information on any attribute id, use API /TemplateService/Templates(Id)/Views(Id)/AttributeViewDetails
+ # This section is optional
+ Attributes:
+ # Specific device whose attributes are to be modified at deployment run-time.
+ # The DeviceId must be listed above in the 'device_id' section.
+ # Service tags are not allowed here.
+ - DeviceId: 12765
+ Attributes:
+ - Id: 15645
+ Value: "0.0.0.0"
+ IsIgnored: false
+ - DeviceId: 10173
+ Attributes:
+ - Id: 18968
+ Value: "hostname-1"
+ IsIgnored: false
+
+ - name: "Deploy template and Operating System (OS) on multiple devices"
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "deploy"
+ template_id: 12
+ device_id:
+ - 12765
+ device_service_tag:
+ - 'SVTG123'
+ attributes:
+ # Include this to install OS on the devices.
+ # This section is optional
+ NetworkBootIsoModel:
+ BootToNetwork: true
+ ShareType: "NFS"
+ IsoTimeout: 1 # allowable values(1,2,4,8,16) in hours
+ IsoPath: "/home/iso_path/filename.iso"
+ ShareDetail:
+ IpAddress: "192.168.0.2"
+ ShareName: "sharename"
+ User: "share_user"
+ Password: "share_password"
+ Options:
+ EndHostPowerState: 1
+ ShutdownType: 0
+ TimeToWaitBeforeShutdown: 300
+ Schedule:
+ RunLater: true
+ RunNow: false
+
+ - name: "Deploy template on multiple devices and changes the device-level attributes. After the template is deployed,
+ install OS using its image."
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "deploy"
+ template_id: 12
+ device_id:
+ - 12765
+ - 10173
+ device_service_tag:
+ - 'SVTG123'
+ - 'SVTG456'
+ attributes:
+ Attributes:
+ - DeviceId: 12765
+ Attributes:
+ - Id: 15645
+ Value: "0.0.0.0"
+ IsIgnored: false
+ - DeviceId: 10173
+ Attributes:
+ - Id: 18968
+ Value: "hostname-1"
+ IsIgnored: false
+ NetworkBootIsoModel:
+ BootToNetwork: true
+ ShareType: "NFS"
+ IsoTimeout: 1 # allowable values(1,2,4,8,16) in hours
+ IsoPath: "/home/iso_path/filename.iso"
+ ShareDetail:
+ IpAddress: "192.168.0.2"
+ ShareName: "sharename"
+ User: "share_user"
+ Password: "share_password"
+ Options:
+ EndHostPowerState: 1
+ ShutdownType: 0
+ TimeToWaitBeforeShutdown: 300
+ Schedule:
+ RunLater: true
+ RunNow: false
+
+ - name: "delete template"
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "delete"
+ template_id: 12
+
+ - name: "export a template"
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "export"
+ template_id: 12
+
+ # Start of example to export template to a local xml file
+ - name: "export template to a local xml file"
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "export"
+ template_name: "my_template"
+ register: result
+ tags:
+ - export_xml_to_file
+ - ansible.builtin.copy:
+ content: "{{ result.Content}}"
+ dest: "/path/to/exported_template.xml"
+ tags:
+ - export_xml_to_file
+ # End of example to export template to a local xml file
+
+ - name: "clone a template"
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "clone"
+ template_id: 12
+ attributes:
+ Name: "New Cloned Template Name"
+
+ - name: "import template from XML content"
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "import"
+ attributes:
+ Name: "Imported Template Name"
+ # Template Type from TemplateService/TemplateTypes
+ Type: 2
+ # xml string content
+ Content: "<SystemConfiguration Model=\"PowerEdge R940\" ServiceTag=\"SVCTAG1\"
+ TimeStamp=\"Tue Sep 24 09:20:57.872551 2019\">\n<Component FQDD=\"AHCI.Slot.6-1\">\n<Attribute
+ Name=\"RAIDresetConfig\">True</Attribute>\n<Attribute Name=\"RAIDforeignConfig\">Clear</Attribute>\n
+ </Component>\n<Component FQDD=\"Disk.Direct.0-0:AHCI.Slot.6-1\">\n<Attribute Name=\"RAIDPDState\">Ready
+ </Attribute>\n<Attribute Name=\"RAIDHotSpareStatus\">No</Attribute>\n</Component>\n
+ <Component FQDD=\"Disk.Direct.1-1:AHCI.Slot.6-1\">\n<Attribute Name=\"RAIDPDState\">Ready</Attribute>\n
+ <Attribute Name=\"RAIDHotSpareStatus\">No</Attribute>\n</Component>\n</SystemConfiguration>\n"
+
+ - name: "import template from local XML file"
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "import"
+ attributes:
+ Name: "Imported Template Name"
+ Type: 2
+ Content: "{{ lookup('ansible.builtin.file', '/path/to/xmlfile') }}"
+
+ - name: "Deploy template and Operating System (OS) on multiple devices."
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "deploy"
+ template_id: 12
+ device_id:
+ - 12765
+ device_service_tag:
+ - 'SVTG123'
+ attributes:
+ # Include this to install OS on the devices.
+ # This section is optional
+ NetworkBootIsoModel:
+ BootToNetwork: true
+ ShareType: "CIFS"
+ IsoTimeout: 1 # allowable values(1,2,4,8,16) in hours
+ IsoPath: "/home/iso_path/filename.iso"
+ ShareDetail:
+ IpAddress: "192.168.0.2"
+ ShareName: "sharename"
+ User: "share_user"
+ Password: "share_password"
+ Options:
+ EndHostPowerState: 1
+ ShutdownType: 0
+ TimeToWaitBeforeShutdown: 300
+ Schedule:
+ RunLater: true
+ RunNow: false
+
+ - name: Create a compliance template from reference device
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "create"
+ device_service_tag: "SVTG123"
+ template_view_type: "Compliance"
+ attributes:
+ Name: "Configuration Compliance"
+ Description: "Configuration Compliance Template"
+ Fqdds: "BIOS"
+
+ - name: Import a compliance template from XML file
+ ome_template:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ command: "import"
+ template_view_type: "Compliance"
+ attributes:
+ Name: "Configuration Compliance"
+ Content: "{{ lookup('ansible.builtin.file', './test.xml') }}"
+ Type: 2
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_create_modify_lcd_display.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_create_modify_lcd_display.yml
new file mode 100644
index 00000000..40f4c002
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_create_modify_lcd_display.yml
@@ -0,0 +1,129 @@
+---
+- hosts: ome
+ connection: local
+ name: "Creates a new template from the provided reference server device.
+ Track the template creation job till completion.
+ Fetch the Attribute specific to LCD Configuration settings from the attribute view of the created template.
+ Modify the created template with the user defined LCD string."
+ gather_facts: False
+ vars:
+ retries_count: 50
+ polling_interval: 5
+ reference_device: "MXL4567"
+ template_name: "LCD String Deploy Template"
+ lcd_display_string: "LCD Custom Display Message"
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: "create template from the reference server"
+ ome_template:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_service_tag: "{{ reference_device }}"
+ attributes:
+ Name: "{{ template_name }}"
+ Description: "LCD Template description"
+ register: result
+
+ - name: "sleep for 30 seconds and continue with play"
+ wait_for: timeout=30
+
+ - name: "Fetch the Task ID from the Template Details using the Template ID"
+ ome_template_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ template_id: "{{ result.return_id }}"
+ register: template_result
+
+ - name: "Track the Template Creation Job till Completion"
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ job_id: "{{ template_result.template_info[hostname].TaskId }}"
+ register: job_result
+ failed_when: "'job_info' not in job_result"
+ until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed'
+ retries: "{{ retries_count }}"
+ delay: "{{ polling_interval }}"
+
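+ # The LCD attribute IDs are discovered at run time rather than hardcoded: the
+ # OME REST API is queried for the template's attribute view and the IDs are
+ # picked out of the response in the set_fact tasks below.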
+ - name: "Retrieve the Attribute ID specific to LCD Configuration"
+ uri:
+ url: "https://{{ hostname }}/api/TemplateService/Templates({{ result.return_id }})/Views(1)/AttributeViewDetails"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ method: "GET"
+ use_proxy: yes
+ status_code: 200
+ return_content: yes
+ validate_certs: no
+ force_basic_auth: yes
+ headers:
+ Content-Type: "application/json"
+ Accept: "application/json"
+ register: config_result
+
+ - name: "System Attribute Groups"
+ set_fact:
+ lcd_fact: "{{ item }}"
+ when:
+ - item.DisplayName=='System'
+ with_items:
+ - "{{ config_result.json.AttributeGroups }}"
+ loop_control:
+ label: "{{ config_result.json.Name }}"
+
+ - name: "LCD System Attributes Groups"
+ set_fact:
+ lcdconfig: "{{ item }}"
+ when:
+ - item.DisplayName=='LCD Configuration'
+ with_items:
+ - "{{ lcd_fact.SubAttributeGroups }}"
+ loop_control:
+ label: "{{ item.DisplayName }}"
+
+ - name: "Retrieve LCD Display Attribute ID"
+ set_fact:
+ lcdattrid: "{{ item.AttributeId }}"
+ when:
+ - item.DisplayName=='LCD 1 User Defined String for LCD'
+ with_items:
+ - "{{ lcdconfig.Attributes }}"
+ loop_control:
+ label: "{{ item.DisplayName }}"
+
+ - name: "Retrieve LCD Config Attribute ID"
+ set_fact:
+ lcdconfigattrid: "{{ item.AttributeId }}"
+ when:
+ - item.DisplayName=='LCD 1 LCD Configuration'
+ with_items:
+ - "{{ lcdconfig.Attributes }}"
+ loop_control:
+ label: "{{ item.DisplayName }}"
+
+ - name: "Modify the created with Custom LCD String to be displayed"
+ ome_template:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "modify"
+ template_id: "{{ result.return_id }}"
+ attributes:
+ Name: "{{ template_name }}"
+ Attributes:
+ - Id: "{{ lcdattrid }}"
+ Value: "{{ lcd_display_string }}"
+ IsIgnored: false
+ - Id: "{{ lcdconfigattrid }}"
+ Value: "User Defined"
+ IsIgnored: false \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info.yml
new file mode 100644
index 00000000..3fd200c0
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info.yml
@@ -0,0 +1,33 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible template inventory details.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Retrieve basic details of all templates.
+ ome_template_info:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+
+ - name: Retrieve details of a specific template identified by its template ID.
+ ome_template_info:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ template_id: "{{template_id}}"
+
+ - name: Get filtered template info based on name.
+ ome_template_info:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ system_query_options:
+ filter: "Name eq 'new template'" \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info_with_filter.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info_with_filter.yml
new file mode 100644
index 00000000..eb040c9c
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_info_with_filter.yml
@@ -0,0 +1,27 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible device Template service.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Get the template with the filter option
+ register: result
+ failed_when: "'template_info' not in result or result.template_info['{{hostname}}']['@odata.count'] == 0"
+ ome_template_info:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ system_query_options:
+ filter: "Name eq 'template_name'"
+ - name: Get the specific template from the result
+ with_subelements:
+ - "{{ result.template_info }}"
+ - value
+ debug:
+ msg: "{{item.1}}"
+ when: item.1['Name']=='template_name'
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_lcd_display_string_deploy.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_lcd_display_string_deploy.yml
new file mode 100644
index 00000000..afb472fa
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_lcd_display_string_deploy.yml
@@ -0,0 +1,46 @@
+---
+- hosts: ome
+ connection: local
+ name: "Deploy this template with the desired LCD string on the target servers.
+ Track the template deploy operation job till completion."
+ gather_facts: False
+ vars:
+ retries_count: 50
+ polling_interval: 5
+ template_name: "LCD Srting Deploy Template"
+ deployable_servicetag:
+ - 'MXL1234'
+ - 'MXL4567'
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: "Deploy Previously created LCD Template "
+ ome_template:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "deploy"
+ template_name: "{{ template_name }}"
+ device_service_tag: "{{ deployable_servicetag }}"
+ register: result
+ tags:
+ - deploy
+
+ - name: "Track the deploy job till completion"
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ job_id: "{{ result.return_id }}"
+ register: job_result
+ failed_when: "'job_info' not in job_result"
+ until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed'
+ retries: "{{ retries_count }}"
+ delay: "{{ polling_interval }}"
+ tags:
+ - track_deploy \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_network_vlan.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_network_vlan.yml
new file mode 100644
index 00000000..fee07b4e
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_network_vlan.yml
@@ -0,0 +1,66 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible template tag and untag.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Tag or untag VLANs in the template
+ ome_template_network_vlan:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ template_id: 78
+ nic_identifier: NIC Slot 4
+ untagged_networks:
+ - port: 1
+ untagged_network_id: 12765
+ - port: 2
+ untagged_network_name: vlan2
+ tagged_networks:
+ - port: 1
+ tagged_network_ids:
+ - 12767
+ - 12768
+ - port: 4
+ tagged_network_ids:
+ - 12767
+ - 12768
+ tagged_network_names:
+ - vlan3
+ - port: 2
+ tagged_network_names:
+ - vlan4
+ - vlan1
+ tags:
+ - tag_untag_vlan
+
+ - name: Clear the tagged and untagged VLANs
+ ome_template_network_vlan:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ template_id: 78
+ nic_identifier: NIC Slot 4
+ untagged_networks:
+ # For removing the untagged VLANs for ports 1 and 2
+ - port: 1
+ untagged_network_id: 0
+ - port: 2
+ untagged_network_name: 0
+ tagged_networks:
+ # For removing the tagged VLANs for ports 1 and 4
+ - port: 1
+ tagged_network_ids: []
+ - port: 4
+ tagged_network_ids: []
+ tagged_network_names: []
+ - port: 2
+ tagged_network_names: []
+ tags:
+ - clear_tagged_untagged \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_with_job_tracking.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_with_job_tracking.yml
new file mode 100644
index 00000000..9f93bbdf
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/template/ome_template_with_job_tracking.yml
@@ -0,0 +1,48 @@
+---
+- hosts: ome
+ vars:
+ retries_count: 50
+ polling_interval: 5 #in seconds
+ connection: local
+ name: "OME - Create Template details tracking"
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: "Create template based on device id."
+ ome_template:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ device_id: 12475
+ attributes:
+ Name: "New Template"
+ Description: "New Template description"
+ register: result
+ failed_when: "'return_id' not in result"
+
+ - name: "Get the job id using return id from template."
+ ome_template_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ template_id: "{{ result.return_id }}"
+ register: facts_result
+
+ - name: "Get job details using job id from template task."
+ ome_job_info:
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ ca_path: "/path/to/ca_cert.pem"
+ job_id: "{{ facts_result.template_info[hostname].TaskId }}"
+ register: job_result
+ failed_when: job_result.job_info.LastRunStatus.Name == 'Failed'
+ changed_when: job_result.job_info.LastRunStatus.Name == 'Completed'
+ until: job_result.job_info.LastRunStatus.Name == 'Completed' or job_result.job_info.LastRunStatus.Name == 'Failed'
+ retries: "{{ retries_count }}"
+ delay: "{{ polling_interval }}"
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user.yml
new file mode 100644
index 00000000..b1589cae
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user.yml
@@ -0,0 +1,70 @@
+---
+- hosts: ome
+ connection: local
+ name: Dell OpenManage Ansible User service.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: create new user.
+ ome_user:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ attributes:
+ UserName: "user1"
+ Password: "UserPassword"
+ RoleId: "10"
+ Enabled: True
+
+ - name: create user with all parameters
+ ome_user:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "present"
+ attributes:
+ UserName: "user2"
+ Description: "user2 description"
+ Password: "UserPassword"
+ RoleId: "10"
+ Enabled: True
+ DirectoryServiceId: 0
+ UserTypeId: 1
+ Locked: False
+ Name: "user2"
+
+ - name: modify existing user
+ ome_user:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "present"
+ attributes:
+ UserName: "user3"
+ RoleId: "10"
+ Enabled: True
+ Description: "Modify user Description"
+
+ - name: delete existing user using id.
+ ome_user:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ user_id: 61874
+
+ - name: delete existing user using name.
+ ome_user:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ state: "absent"
+ name: "name" \ No newline at end of file
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user_info.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user_info.yml
new file mode 100644
index 00000000..6016d502
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/user/ome_user_info.yml
@@ -0,0 +1,33 @@
+---
+- hosts: ome
+ connection: local
+ name: Fetching OME user facts.
+ gather_facts: False
+
+ collections:
+ - dellemc.openmanage
+
+ tasks:
+ - name: Retrieve basic details of all accounts.
+ ome_user_info:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+
+ - name: Retrieve details of a specific account identified by its account ID.
+ ome_user_info:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ account_id: "{{account_id}}"
+
+ - name: Retrieve details of a specific user using filter with UserName.
+ ome_user_info:
+ hostname: "{{hostname}}"
+ username: "{{username}}"
+ password: "{{password}}"
+ ca_path: "/path/to/ca_cert.pem"
+ system_query_options:
+ filter: "UserName eq 'test'" \ No newline at end of file