author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-28 16:03:42 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-28 16:03:42 +0000
commit     66cec45960ce1d9c794e9399de15c138acb18aed (patch)
tree       59cd19d69e9d56b7989b080da7c20ef1a3fe2a5a /ansible_collections/netapp_eseries/santricity/tests/integration
parent     Initial commit. (diff)
Adding upstream version 7.3.0+dfsg.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ansible_collections/netapp_eseries/santricity/tests/integration')
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/integration_config.yml  32
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts/tasks/main.yml  117
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts_syslog/tasks/main.yml  112
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_asup/tasks/main.yml  287
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auditlog/tasks/main.yml  220
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auth/tasks/main.yml  170
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_client_certificate/tasks/main.yml  55
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_discover/tasks/main.yml  64
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_drive_firmware/tasks/main.yml  185
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_facts/tasks/main.yml  19
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_legacy_tests.yml  128
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_tests.yml  320
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/main.yml  2
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_global/tasks/main.yml  185
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_host/tasks/main.yml  243
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_hostgroup/tasks/main.yml  137
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ib_iser_interface/tasks/main.yml  88
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_interface/tasks/main.yml  115
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_target/tasks/main.yml  81
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ldap/tasks/main.yml  104
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_lun_mapping/tasks/main.yml  318
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_mgmt_interface/tasks/main.yml  383
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/ib.yml  88
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/main.yml  2
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/roce.yml  105
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_drive_firmware_upload/tasks/main.yml  65
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_firmware_upload/tasks/main.yml  65
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_systems/tasks/main.yml  160
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_storagepool/tasks/main.yml  1038
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_syslog/tasks/main.yml  127
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_volume/tasks/main.yml  768
31 files changed, 5783 insertions, 0 deletions
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/integration_config.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/integration_config.yml
new file mode 100644
index 00000000..8292ee42
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/integration_config.yml
@@ -0,0 +1,32 @@
+# URL and credentials - all SANtricity modules will use this information
+ssid: "1"
+base_url: https://192.168.1.100:8443/devmgr/v2/
+username: admin
+password: adminPass
+validate_cert: false
+
+# Proxy URL and credentials - modules that require special API testing will use this information
+proxy_ssid: "10"
+proxy_legacy_ssid: "20"
+proxy_base_url: https://192.168.1.200:8443/devmgr/v2/
+proxy_username: admin
+proxy_password: ""
+proxy_validate_cert: false
+
+# na_santricity_auth module variable requirements in addition to both embedded and proxy credentials
+expected_serial_with_proxy_legacy: "711214012345"
+expected_serial_with_proxy_embedded: "021633012345"
+expected_serial_without_proxy: "021628012345"
+proxy_discover_subnet: 192.168.1.0/24
+systems:
+ - ssid: 10 # should match proxy_ssid above
+ addresses: ["192.168.1.110"]
+ - ssid: 20 # should match proxy_legacy_ssid above
+ addresses: ["192.168.1.120"]
+
+
+# na_santricity_ldap module variable requirements
+#bind_user: "CN=bind_user,OU=accounts,DC=test,DC=example,DC=com"
+#bind_password: "bind_password"
+#server_url: "ldap://test.example.com:389"
+#search_base: "OU=users,DC=test,DC=example,DC=com"
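+#
+# If uncommented, these variables would feed the na_santricity_ldap test. A
+# minimal, hypothetical sketch of such a task is below; server_url, bind_user,
+# bind_password, and search_base mirror the variable names above, while the
+# state value and any role mappings are assumptions to check against the
+# module documentation:
+#
+#- name: Configure LDAP authentication for the storage system
+#  na_santricity_ldap:
+#    ssid: "{{ ssid }}"
+#    api_url: "{{ base_url }}"
+#    api_username: "{{ username }}"
+#    api_password: "{{ password }}"
+#    validate_certs: "{{ validate_cert }}"
+#    state: present
+#    bind_user: "{{ bind_user }}"
+#    bind_password: "{{ bind_password }}"
+#    server_url: "{{ server_url }}"
+#    search_base: "{{ search_base }}"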
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts/tasks/main.yml
new file mode 100644
index 00000000..a5463ea8
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts/tasks/main.yml
@@ -0,0 +1,117 @@
+# Test code for the na_santricity_alerts module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+- name: Set credential facts for the na_santricity_alerts test
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Disable alerts
+ na_santricity_alerts:
+ <<: *creds
+ state: disabled
+- name: Get the current device alerts
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/device-alerts"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ register: current_state
+- name: Determine whether the current state is expected
+ assert:
+ that: "{{ not current_state['json']['alertingEnabled'] }}"
+ msg: "Failed to disable alerts!"
+
+- name: Set the initial alerting settings (changed, check_mode)
+ na_santricity_alerts:
+ <<: *creds
+ state: enabled
+ server: mail.example.com
+ sender: noreply@example.com
+ recipients:
+ - noreply@example.com
+ register: result
+ check_mode: true
+- name: Get the current device alerts
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/device-alerts"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ register: current_state
+- name: Determine whether the current state is expected
+ assert:
+ that: "{{ result['changed'] and not current_state['json']['alertingEnabled'] }}"
+ msg: "Failed to disable alerts!"
+
+- name: Set the initial alerting settings (changed)
+ na_santricity_alerts:
+ <<: *creds
+ state: enabled
+ server: mail.example.com
+ sender: noreply@example.com
+ recipients:
+ - noreply@example.com
+ register: result
+- name: Get the current device alerts
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/device-alerts"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ register: current_state
+- name: Determine whether the current state is expected
+ assert:
+ that: "{{ result['changed'] and current_state['json']['alertingEnabled'] and
+ current_state['json']['emailServerAddress'] == 'mail.example.com' and
+ current_state['json']['emailSenderAddress'] == 'noreply@example.com' and
+ current_state['json']['recipientEmailAddresses'] == ['noreply@example.com'] }}"
+ msg: "Failed to enable alerts!"
+
+- name: Set different alerting settings (changed)
+ na_santricity_alerts:
+ <<: *creds
+ state: enabled
+ server: mail2.example.com
+ sender: noreply2@example.com
+ recipients:
+ - noreply@example.com
+ - noreply2@example.com
+ register: result
+- name: Get the current device alerts
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/device-alerts"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ register: current_state
+- name: Determine whether the current state is expected
+ assert:
+ that: "{{ result['changed'] and current_state['json']['alertingEnabled'] and
+ current_state['json']['emailServerAddress'] == 'mail2.example.com' and
+ current_state['json']['emailSenderAddress'] == 'noreply2@example.com' and
+ (current_state['json']['recipientEmailAddresses'] == ['noreply@example.com', 'noreply2@example.com'] or
+ current_state['json']['recipientEmailAddresses'] == ['noreply2@example.com', 'noreply@example.com']) }}"
+ msg: "Failed to enable alerts!"
+
+- name: Disable alerts again (changed)
+ na_santricity_alerts:
+ <<: *creds
+ state: disabled
+ register: result
+- name: Get the current device alerts
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/device-alerts"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ register: current_state
+- name: Determine whether the current state is expected
+ assert:
+ that: "{{ result['changed'] and not current_state['json']['alertingEnabled'] }}"
+ msg: "Failed to disable alerts!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts_syslog/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts_syslog/tasks/main.yml
new file mode 100644
index 00000000..34de206e
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts_syslog/tasks/main.yml
@@ -0,0 +1,112 @@
+# Test code for the na_santricity_alerts_syslog module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+- name: Set facts for the na_santricity_alerts_syslog module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Delete all alert syslog servers
+ na_santricity_alerts_syslog:
+ <<: *creds
+
+- name: Add alert syslog servers (change, check_mode)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ servers:
+ - address: "192.168.1.100"
+ - address: "192.168.2.100"
+ port: 514
+ - address: "192.168.3.100"
+ port: 1000
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Add alert syslog servers (change)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ servers:
+ - address: "192.168.1.100"
+ - address: "192.168.2.100"
+ port: 514
+ - address: "192.168.3.100"
+ port: 1000
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Add alert syslog servers (no change)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ test: true
+ servers:
+ - address: "192.168.1.100"
+ - address: "192.168.2.100"
+ port: 514
+ - address: "192.168.3.100"
+ port: 1000
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Remove one alert syslog server (change)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ test: true
+ servers:
+ - address: "192.168.2.100"
+ port: 514
+ - address: "192.168.3.100"
+ port: 1000
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add one alert syslog server (change)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ test: true
+ servers:
+ - address: "192.168.1.100"
+ - address: "192.168.2.100"
+ port: 514
+ - address: "192.168.3.100"
+ port: 1000
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete all alert syslog servers (change)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete all alert syslog servers (no change)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_asup/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_asup/tasks/main.yml
new file mode 100644
index 00000000..fd66149f
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_asup/tasks/main.yml
@@ -0,0 +1,287 @@
+# Test code for the na_santricity_asup module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set credential facts
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Enable auto-support using default values
+ na_santricity_asup:
+ <<: *creds
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected default state
+ assert:
+ that: "{{ current.json.asupEnabled and
+ current.json.onDemandEnabled and
+ current.json.remoteDiagsEnabled and
+ current.json.schedule.dailyMinTime == 0 and
+ current.json.schedule.dailyMaxTime == 1439 }}"
+ msg: "Unexpected auto-support state"
+- name: Validate auto-support schedule
+ assert:
+ that: "{{ item in current.json.schedule.daysOfWeek }}"
+ msg: "{{ item }} is missing from the schedule"
+ loop: "{{ lookup('list', ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']) }}"
+
+- name: Disable auto-support
+ na_santricity_asup:
+ <<: *creds
+ state: disabled
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support is disabled
+ assert:
+ that: "{{ not current.json.asupEnabled }}"
+ msg: "Auto-support failed to be disabled"
+
+- name: Enable auto-support using specific values
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ active: true
+ start: 22
+ end: 24
+ days:
+ - friday
+ - saturday
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current.json.asupEnabled and
+ current.json.onDemandEnabled and
+ current.json.remoteDiagsEnabled and
+ current.json.schedule.dailyMinTime == (22 * 60) and
+ current.json.schedule.dailyMaxTime == (24 * 60 - 1) }}"
+ msg: "Unexpected auto-support state"
+- name: Validate auto-support schedule
+ assert:
+ that: "{{ item in current.json.schedule.daysOfWeek }}"
+ msg: "{{ item }} is missing from the schedule"
+ loop: "{{ lookup('list', ['friday', 'saturday']) }}"
+
+- name: Set auto-support schedule
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ active: true
+ start: 0
+ end: 5
+ days:
+ - monday
+ - thursday
+ - sunday
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current.json.asupEnabled and
+ current.json.onDemandEnabled and
+ current.json.remoteDiagsEnabled and
+ current.json.schedule.dailyMinTime == (0 * 60) and
+ current.json.schedule.dailyMaxTime == (5 * 60) }}"
+ msg: "Unexpected auto-support state"
+- name: Validate auto-support schedule
+ assert:
+ that: "{{ item in current.json.schedule.daysOfWeek }}"
+ msg: "{{ item }} is missing from the schedule"
+ loop: "{{ lookup('list', ['monday', 'thursday', 'sunday']) }}"
+
+- name: Repeat auto-support schedule change to verify idempotency
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ active: true
+ start: 0
+ end: 5
+ days:
+ - monday
+ - thursday
+ - sunday
+ register: result
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current.json.asupEnabled and
+ current.json.onDemandEnabled and
+ current.json.remoteDiagsEnabled and
+ current.json.schedule.dailyMinTime == (0 * 60) and
+ current.json.schedule.dailyMaxTime == (5 * 60) }}"
+ msg: "Unexpected auto-support state"
+- name: Validate auto-support schedule
+ assert:
+ that: "{{ item in current.json.schedule.daysOfWeek }}"
+ msg: "{{ item }} is missing from the schedule"
+ loop: "{{ lookup('list', ['monday', 'thursday', 'sunday']) }}"
+- name: Validate change was not detected
+ assert:
+ that: "{{ not result.changed }}"
+ msg: "Invalid change was detected"
+
+- name: Set auto-support schedule with active disabled
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ active: false
+ start: 0
+ end: 5
+ days:
+ - monday
+ - thursday
+ - sunday
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current.json.asupEnabled and not current.json.onDemandEnabled and not current.json.remoteDiagsEnabled }}"
+ msg: "Unexpected auto-support state"
+
+- name: Set auto-support direct delivery method (http)
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ method: http
+ routing_type: direct
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current['json']['delivery']['method'] == 'http' }}"
+ msg: "Delievery method should be http!"
+
+- name: Set auto-support direct delivery method (https)
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ method: https
+ routing_type: direct
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current['json']['delivery']['method'] == 'https' }}"
+ msg: "Delievery method should be https!"
+
+- name: Set auto-support proxy delivery method
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ method: https
+ routing_type: proxy
+ proxy:
+ host: 192.168.1.1
+ port: 1000
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current['json']['delivery']['method'] == 'https' and
+ current['json']['delivery']['proxyHost'] == '192.168.1.1' and
+ current['json']['delivery']['proxyPort'] == 1000 }}"
+ msg: "Delievery method should be https-proxy-host!"
+
+- name: Set auto-support proxy-script delivery method
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ method: https
+ routing_type: script
+ proxy:
+ script: autosupport_script.sh
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current['json']['delivery']['method'] == 'https' and
+ current['json']['delivery']['proxyScript'] == 'autosupport_script.sh' }}"
+ msg: "Delievery method should be https-proxy-script!"
+
+- name: Set auto-support email delivery method
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ method: email
+ email:
+ server: server@example.com
+ sender: noreply@example.com
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current['json']['delivery']['method'] == 'smtp' and
+ current['json']['delivery']['mailRelayServer'] == 'server@example.com' and
+ current['json']['delivery']['mailSenderAddress'] == 'noreply@example.com' }}"
+ msg: "Delievery method should be email!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auditlog/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auditlog/tasks/main.yml
new file mode 100644
index 00000000..424ba2e5
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auditlog/tasks/main.yml
@@ -0,0 +1,220 @@
+# Test code for the na_santricity_auditlog module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# Note: If the audit log is full, clear it before testing; otherwise unexpected 422 SYMbol errors can occur.
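+# A commented-out sketch of one way to clear it first via the REST API; the
+# DELETE method on the audit-log endpoint is an assumption to verify against
+# your Web Services API version:
+#- name: Clear the audit log before testing
+#  uri:
+#    url: "{{ base_url }}storage-systems/{{ ssid }}/audit-log"
+#    method: DELETE
+#    user: "{{ username }}"
+#    password: "{{ password }}"
+#    validate_certs: "{{ validate_cert }}"
+#    status_code: [200, 204]
+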
+- name: Set credential facts
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ proxy_credentials: &proxy_creds
+ ssid: "PROXY"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ proxy_embedded_credentials: &proxy_embedded_creds
+ ssid: "{{ proxy_ssid }}"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+
+- name: Set audit log settings to the defaults
+ na_santricity_auditlog:
+ <<: *creds
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/audit-log/config"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ config['json']['auditLogMaxRecords'] == 50000 and
+ config['json']['auditLogLevel'] == 'writeOnly' and
+ config['json']['auditLogFullPolicy'] == 'overWrite' and
+ config['json']['auditLogWarningThresholdPct'] == 90 }}"
+ msg: "Config settings are not correct!"
+
+- name: Change audit log settings. (change, check_mode)
+ na_santricity_auditlog:
+ <<: *creds
+ max_records: 50000
+ log_level: all
+ full_policy: preventSystemAccess
+ threshold: 60
+ register: result
+ check_mode: true
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/audit-log/config"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ result['changed'] and config['json']['auditLogMaxRecords'] == 50000 and
+ config['json']['auditLogLevel'] == 'writeOnly' and
+ config['json']['auditLogFullPolicy'] == 'overWrite' and
+ config['json']['auditLogWarningThresholdPct'] == 90 }}"
+ msg: "Config settings are not correct!"
+
+- name: Change audit log settings. (change)
+ na_santricity_auditlog:
+ <<: *creds
+ max_records: 10000
+ log_level: all
+ full_policy: preventSystemAccess
+ threshold: 60
+ register: result
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/audit-log/config"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ result['changed'] and config['json']['auditLogMaxRecords'] == 10000 and
+ config['json']['auditLogLevel'] == 'all' and
+ config['json']['auditLogFullPolicy'] == 'preventSystemAccess' and
+ config['json']['auditLogWarningThresholdPct'] == 60 }}"
+ msg: "Config settings are not correct!"
+
+- name: Set audit log settings to the defaults (proxy)
+ na_santricity_auditlog:
+ <<: *proxy_creds
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ proxy_base_url }}audit-log/config"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ config['json']['auditLogMaxRecords'] == 50000 and
+ config['json']['auditLogLevel'] == 'writeOnly' and
+ config['json']['auditLogFullPolicy'] == 'overWrite' and
+ config['json']['auditLogWarningThresholdPct'] == 90 }}"
+ msg: "Config settings are not correct!"
+
+- name: Change audit log settings. (proxy) (change, check_mode)
+ na_santricity_auditlog:
+ <<: *proxy_creds
+ max_records: 50000
+ log_level: all
+ full_policy: preventSystemAccess
+ threshold: 60
+ register: result
+ check_mode: true
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ proxy_base_url }}audit-log/config"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ result['changed'] and config['json']['auditLogMaxRecords'] == 50000 and
+ config['json']['auditLogLevel'] == 'writeOnly' and
+ config['json']['auditLogFullPolicy'] == 'overWrite' and
+ config['json']['auditLogWarningThresholdPct'] == 90 }}"
+ msg: "Config settings are not correct!"
+
+- name: Change audit log settings. (proxy) (change)
+ na_santricity_auditlog:
+ <<: *proxy_creds
+ max_records: 10000
+ log_level: all
+ full_policy: preventSystemAccess
+ threshold: 60
+ register: result
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ proxy_base_url }}audit-log/config"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ result['changed'] and config['json']['auditLogMaxRecords'] == 10000 and
+ config['json']['auditLogLevel'] == 'all' and
+ config['json']['auditLogFullPolicy'] == 'preventSystemAccess' and
+ config['json']['auditLogWarningThresholdPct'] == 60 }}"
+ msg: "Config settings are not correct!"
+
+- name: Set audit log settings to the defaults (proxy embedded)
+ na_santricity_auditlog:
+ <<: *proxy_embedded_creds
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/audit-log/config"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ config['json']['auditLogMaxRecords'] == 50000 and
+ config['json']['auditLogLevel'] == 'writeOnly' and
+ config['json']['auditLogFullPolicy'] == 'overWrite' and
+ config['json']['auditLogWarningThresholdPct'] == 90 }}"
+ msg: "Config settings are not correct!"
+
+- name: Change audit log settings. (proxy embedded) (change, check_mode)
+ na_santricity_auditlog:
+ <<: *proxy_embedded_creds
+ max_records: 50000
+ log_level: all
+ full_policy: preventSystemAccess
+ threshold: 60
+ register: result
+ check_mode: true
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/audit-log/config"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ result['changed'] and config['json']['auditLogMaxRecords'] == 50000 and
+ config['json']['auditLogLevel'] == 'writeOnly' and
+ config['json']['auditLogFullPolicy'] == 'overWrite' and
+ config['json']['auditLogWarningThresholdPct'] == 90 }}"
+ msg: "Config settings are not correct!"
+
+- name: Change audit log settings. (proxy embedded) (change)
+ na_santricity_auditlog:
+ <<: *proxy_embedded_creds
+ max_records: 10000
+ log_level: all
+ full_policy: preventSystemAccess
+ threshold: 60
+ register: result
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/audit-log/config"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ result['changed'] and config['json']['auditLogMaxRecords'] == 10000 and
+ config['json']['auditLogLevel'] == 'all' and
+ config['json']['auditLogFullPolicy'] == 'preventSystemAccess' and
+ config['json']['auditLogWarningThresholdPct'] == 60 }}"
+ msg: "Config settings are not correct!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auth/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auth/tasks/main.yml
new file mode 100644
index 00000000..12c55252
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auth/tasks/main.yml
@@ -0,0 +1,170 @@
+# Test code for the na_santricity_auth module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# Clear the embedded, legacy, and proxy passwords before executing integration tests!
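+#
+# A commented-out sketch of clearing the embedded admin password, assuming the
+# current admin password is known (it mirrors the tasks used later in this file):
+#- name: Clear the embedded admin password
+#  na_santricity_auth:
+#    ssid: "{{ ssid }}"
+#    api_url: "{{ base_url }}"
+#    api_username: "{{ username }}"
+#    api_password: "{{ password }}"
+#    validate_certs: "{{ validate_cert }}"
+#    minimum_password_length: 0
+#    password: ""
+#    user: admin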
+
+- name: Set initial credential variables
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ proxy_credentials: &proxy_creds
+ ssid: "{{ proxy_ssid }}"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ proxy_legacy_credentials: &proxy_legacy_creds
+ ssid: "{{ proxy_legacy_ssid }}"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+
+# TODO: series of tests for embedded
+# Validate admin passwords are updated regardless of supplied api_password and current_admin_password options
+- name: Set storage system's initial admin password (embedded, changed)
+ na_santricity_auth:
+ <<: *creds
+ minimum_password_length: 8
+ password: infiniti
+ user: admin
+
+- name: Set storage system's non-admin passwords (embedded, changed)
+ na_santricity_auth:
+ <<: *creds
+ password: "{{ item }}_password"
+ user: "{{ item }}"
+ ignore_errors: true
+ loop: ["monitor", "support", "security", "storage"]
+
+- name: Set storage system's initial admin password (embedded, changed)
+ na_santricity_auth:
+ <<: *creds
+ minimum_password_length: 0
+ password: ""
+ user: admin
+
+- name: Set storage system's initial admin password (embedded, changed)
+ na_santricity_auth:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: ""
+ validate_certs: "{{ validate_cert }}"
+ minimum_password_length: 8
+
+- name: Set proxy's initial password (proxy, changed)
+ na_santricity_auth:
+ ssid: proxy
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ password: infiniti
+ user: admin
+
+# TODO: series of tests for proxy
+- name: Add storage systems to proxy without passwords
+ na_santricity_proxy_systems:
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ subnet_mask: "{{ proxy_discover_subnet }}"
+ systems: "{{ systems }}"
+ password: ""
+
+# Validate proxy system's admin passwords are updated regardless of current_admin_password options
+- name: Set storage system's initial password (proxy system with embedded, changed)
+ na_santricity_auth:
+ <<: *proxy_creds
+ minimum_password_length: 8
+ current_admin_password: "" # THIS NEEDS TO MATCH STORAGE SYSTEM'S STORED-PASSWORD
+ password: infiniti
+ user: admin
+
+- name: Set storage system's initial password (proxy system without embedded, changed)
+ na_santricity_auth:
+ <<: *proxy_legacy_creds
+ minimum_password_length: 8
+ current_admin_password: "" # THIS NEEDS TO MATCH LEGACY STORAGE SYSTEM'S STORED-PASSWORD
+ password: infiniti
+ user: admin
+
+- pause: seconds=10
+
+- name: Set storage system's initial password (proxy system with embedded, changed)
+ na_santricity_auth:
+ ssid: "10"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ current_admin_password: infiniti # THIS NEEDS TO MATCH STORAGE SYSTEM'S STORED-PASSWORD
+ password: "{{ item }}_password"
+ user: "{{ item }}"
+ loop: ["monitor", "support", "security", "storage"]
+
+- name: Set storage system's initial password (proxy system with embedded, changed)
+ na_santricity_auth:
+ ssid: "10"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ current_admin_password: infiniti # THIS NEEDS TO MATCH STORAGE SYSTEM'S STORED-PASSWORD
+ minimum_password_length: 0
+ password: ""
+ user: admin
+
+- name: Set storage system's initial password (proxy system without embedded, changed)
+ na_santricity_auth:
+ ssid: "20"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ current_admin_password: infiniti # THIS NEEDS TO MATCH STORAGE SYSTEM'S STORED-PASSWORD
+ password: ""
+ user: admin
+
+- name: Set storage system's initial password (proxy system without embedded, changed)
+ na_santricity_auth:
+ ssid: proxy
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ minimum_password_length: 0
+ password: ""
+ user: admin
+
+- name: Set storage system's initial password (proxy system with embedded, changed)
+ na_santricity_auth:
+ ssid: Proxy
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "" # THIS NEEDS TO MATCH PROXY'S PASSWORD
+ validate_certs: "{{ proxy_validate_cert }}"
+ minimum_password_length: 8
+
+- name: Set storage system's initial password (proxy system with embedded, changed)
+ na_santricity_auth:
+ ssid: "10"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "" # THIS NEEDS TO MATCH PROXY'S PASSWORD
+ validate_certs: "{{ proxy_validate_cert }}"
+ minimum_password_length: 8
+
+- name: Remove storage system from proxy
+ na_santricity_proxy_systems:
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "" # THIS NEEDS TO MATCH PROXY'S PASSWORD
+ validate_certs: "{{ proxy_validate_cert }}"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_client_certificate/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_client_certificate/tasks/main.yml
new file mode 100644
index 00000000..9f3964d9
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_client_certificate/tasks/main.yml
@@ -0,0 +1,55 @@
+# Test code for the na_santricity_client_certificate module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set credential facts
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ certificates:
+ - "/home/swartzn/ExampleRootCA.crt"
+ - "/home/swartzn/ExampleIssuingCA.crt"
+ - "/home/swartzn/ExampleClient.crt"
+
+- name: Remove certificates
+ na_santricity_client_certificate:
+ <<: *creds
+
+- name: Upload certificate (changed, check_mode)
+ na_santricity_client_certificate:
+ <<: *creds
+ certificates: "{{ certificates }}"
+ register: result
+ check_mode: true
+- assert:
+ that: "{{ result['changed'] }}"
+ msg: "Failed to upload certificates to storage array."
+
+- name: Upload certificate (changed)
+ na_santricity_client_certificate:
+ <<: *creds
+ certificates: "{{ certificates }}"
+ register: result
+- assert:
+ that: "{{ result['changed'] }}"
+ msg: "Failed to upload certificates to storage array."
+
+- name: Repeat upload certificate (no change)
+ na_santricity_client_certificate:
+ <<: *creds
+ certificates: "{{ certificates }}"
+ register: result
+- assert:
+ that: "{{ not result['changed'] }}"
+ msg: "Failed not to make any changes."
+
+- name: Remove certificates
+ na_santricity_client_certificate:
+ <<: *creds
+ register: result
+- assert:
+ that: "{{ result['changed'] }}"
+ msg: "Failed to remove uploaded certificates" \ No newline at end of file
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_discover/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_discover/tasks/main.yml
new file mode 100644
index 00000000..38c18f97
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_discover/tasks/main.yml
@@ -0,0 +1,64 @@
+# Test code for the na_santricity_discover module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+- name: Discover storage systems using SANtricity Web Services Proxy
+ na_santricity_discover:
+ proxy_url: "{{ proxy_base_url }}"
+ proxy_username: "{{ proxy_username }}"
+ proxy_password: "{{ proxy_password }}"
+ proxy_validate_certs: "{{ proxy_validate_cert }}"
+ subnet_mask: "{{ proxy_discover_subnet }}"
+ prefer_embedded: false
+ register: systems
+- name: Find storage system
+ set_fact:
+ api_url: |-
+ {%- for system_serial in (systems["systems_found"].keys() | list) -%}
+ {%- if system_serial == expected_serial_with_proxy_legacy %}
+ {{- systems["systems_found"][system_serial]["api_urls"][0] -}}
+ {%- endif -%}
+ {%- endfor -%}
+- name: Verify storage system is found
+ fail:
+ msg: "Storage system was not discovered"
+ when: api_url == "" or api_url != proxy_base_url
+
+- name: Discover storage systems using SANtricity Web Services Proxy with a preference for embedded url
+ na_santricity_discover:
+ proxy_url: "{{ proxy_base_url }}"
+ proxy_username: "{{ proxy_username }}"
+ proxy_password: "{{ proxy_password }}"
+ proxy_validate_certs: "{{ proxy_validate_cert }}"
+ subnet_mask: "{{ proxy_discover_subnet }}"
+ prefer_embedded: true
+ register: systems
+- name: Find storage system
+ set_fact:
+ api_url: |-
+ {%- for system_serial in (systems["systems_found"].keys() | list) -%}
+ {%- if system_serial == expected_serial_with_proxy_embedded %}
+ {{- systems["systems_found"][system_serial]["api_urls"][0] -}}
+ {%- endif -%}
+ {%- endfor -%}
+- name: Verify storage system is found
+ fail:
+ msg: "Storage system was not discovered"
+ when: api_url == "" or api_url == proxy_base_url
+
+- name: Discover storage systems not using SANtricity Web Services Proxy (requires SANtricity version 11.60.2 or later)
+ na_santricity_discover:
+ subnet_mask: "{{ proxy_discover_subnet }}"
+ register: systems
+- name: Find storage system
+ set_fact:
+ api_url: |-
+ {%- for system_serial in (systems["systems_found"].keys() | list) -%}
+ {%- if system_serial == expected_serial_without_proxy %}
+ {{- systems["systems_found"][system_serial]["api_urls"][0] -}}
+ {%- endif -%}
+ {%- endfor -%}
+- name: Verify storage system is found
+ fail:
+ msg: "Storage system was not discovered"
+ when: api_url == ""
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_drive_firmware/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_drive_firmware/tasks/main.yml
new file mode 100644
index 00000000..5559691d
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_drive_firmware/tasks/main.yml
@@ -0,0 +1,185 @@
+# Test code for the na_santricity_drive_firmware module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# Known SYMbol issue: occasionally SYMbol returns a 422, which causes Ansible to fail even though the drive firmware download completes.
+# Work-around: remove all storage provisioning before commencing the test.
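+# A commented-out sketch of the work-around, assuming provisioning lives in a
+# single storage pool (the pool name below is hypothetical; adjust to your environment):
+#- name: Remove all storage provisioning before the test
+#  na_santricity_storagepool:
+#    ssid: "{{ ssid }}"
+#    api_url: "{{ base_url }}"
+#    api_username: "{{ username }}"
+#    api_password: "{{ password }}"
+#    validate_certs: "{{ validate_cert }}"
+#    state: absent
+#    name: storage_pool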
+
+- name: Set necessary credentials and other facts.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ firmware:
+ downgrade:
+ list:
+ - "/home/swartzn/Downloads/drive firmware/D_PX04SVQ160_DOWNGRADE_MS00toMSB6_801.dlp"
+ - "/home/swartzn/Downloads/drive firmware/D_ST1200MM0017_DNGRADE_MS02toMS00_6600_802.dlp"
+ check:
+ - firmware: "D_PX04SVQ160_DOWNGRADE_MS00toMSB6_801.dlp"
+ drive: "PX04SVQ160"
+ version: "MSB6"
+ - firmware: "D_ST1200MM0017_DNGRADE_MS02toMS00_6600_802.dlp"
+ drive: "ST1200MM0017"
+ version: "MS00"
+ upgrade:
+ list:
+ - "/home/swartzn/Downloads/drive firmware/D_PX04SVQ160_30603183_MS00_6600_001.dlp"
+ - "/home/swartzn/Downloads/drive firmware/D_ST1200MM0017_30602214_MS02_5600_002.dlp"
+ check:
+ - firmware: "D_PX04SVQ160_30603183_MS00_6600_001.dlp"
+ drive: "PX04SVQ160"
+ version: "MS00"
+ - firmware: "D_ST1200MM0017_30602214_MS02_5600_002.dlp"
+ drive: "ST1200MM0017"
+ version: "MS02"
+
+- name: Set drive firmware (baseline, maybe change)
+  na_santricity_drive_firmware:
+ <<: *creds
+ firmware: "{{ firmware['downgrade']['list'] }}"
+ wait_for_completion: true
+ ignore_inaccessible_drives: true
+ upgrade_drives_online: false
+ register: drive_firmware
+- pause: seconds=5
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/drives"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_drive_firmware
+- name: Check if drive firmware is the expected versions
+ assert:
+ that: "{{ (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
+ (firmware['downgrade']['check'][0]['drive'] == item['productID'].strip() and
+ firmware['downgrade']['check'][0]['version'] == item['softwareVersion']) or
+ (firmware['downgrade']['check'][1]['drive'] == item['productID'].strip() and
+ firmware['downgrade']['check'][1]['version'] == item['softwareVersion']) }}"
+ msg: "Drive firmware failed to update all drives"
+ loop: "{{ lookup('list', current_drive_firmware['json']) }}"
+
+- name: Set drive firmware (upgrade, change-checkmode)
+  na_santricity_drive_firmware:
+ <<: *creds
+ firmware: "{{ firmware['upgrade']['list'] }}"
+ wait_for_completion: true
+ ignore_inaccessible_drives: true
+ upgrade_drives_online: false
+ register: drive_firmware
+ check_mode: true
+- pause: seconds=5
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/drives"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_drive_firmware
+- name: Validate change status
+ assert:
+ that: "{{ drive_firmware.changed }}"
+ msg: "Change status is incorrect."
+- name: Check if drive firmware is the expected versions
+ assert:
+ that: "{{ (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
+ (firmware['downgrade']['check'][0]['drive'] == item['productID'].strip() and
+ firmware['downgrade']['check'][0]['version'] == item['softwareVersion']) or
+ (firmware['downgrade']['check'][1]['drive'] == item['productID'].strip() and
+ firmware['downgrade']['check'][1]['version'] == item['softwareVersion']) }}"
+ msg: "Drive firmware failed to update all drives"
+ loop: "{{ lookup('list', current_drive_firmware['json']) }}"
+
+- name: Set drive firmware (upgrade, change)
+  na_santricity_drive_firmware:
+ <<: *creds
+ firmware: "{{ firmware['upgrade']['list'] }}"
+ wait_for_completion: true
+ ignore_inaccessible_drives: true
+ upgrade_drives_online: false
+ register: drive_firmware
+- pause: seconds=5
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/drives"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_drive_firmware
+- name: Validate change status
+ assert:
+ that: "{{ drive_firmware.changed }}"
+ msg: "Change status is incorrect."
+- name: Check if drive firmware is the expected versions
+ assert:
+ that: "{{ (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
+ (firmware['upgrade']['check'][0]['drive'] == item['productID'].strip() and
+ firmware['upgrade']['check'][0]['version'] == item['softwareVersion']) or
+ (firmware['upgrade']['check'][1]['drive'] == item['productID'].strip() and
+ firmware['upgrade']['check'][1]['version'] == item['softwareVersion']) }}"
+ msg: "Drive firmware failed to update all drives"
+ loop: "{{ lookup('list', current_drive_firmware['json']) }}"
+
+- name: Set drive firmware (upgrade, no change)
+  na_santricity_drive_firmware:
+ <<: *creds
+ firmware: "{{ firmware['upgrade']['list'] }}"
+ wait_for_completion: true
+ ignore_inaccessible_drives: true
+ upgrade_drives_online: false
+ register: drive_firmware
+- pause: seconds=5
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/drives"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_drive_firmware
+- name: Validate change status
+ assert:
+ that: "{{ not drive_firmware.changed }}"
+ msg: "Change status is incorrect."
+- name: Check if drive firmware is the expected versions
+ assert:
+ that: "{{ (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
+ (firmware['upgrade']['check'][0]['drive'] == item['productID'].strip() and
+ firmware['upgrade']['check'][0]['version'] == item['softwareVersion']) or
+ (firmware['upgrade']['check'][1]['drive'] == item['productID'].strip() and
+ firmware['upgrade']['check'][1]['version'] == item['softwareVersion']) }}"
+ msg: "Drive firmware failed to update all drives"
+ loop: "{{ lookup('list', current_drive_firmware['json']) }}"
+
+- name: Set drive firmware (downgrade, change)
+  na_santricity_drive_firmware:
+ <<: *creds
+ firmware: "{{ firmware['downgrade']['list'] }}"
+ wait_for_completion: true
+ ignore_inaccessible_drives: true
+ upgrade_drives_online: false
+ register: drive_firmware
+- pause: seconds=5
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/drives"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_drive_firmware
+- name: Validate change status
+ assert:
+ that: "{{ drive_firmware.changed }}"
+ msg: "Change status is incorrect."
+- name: Check if drive firmware is the expected versions
+ assert:
+ that: "{{ (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
+ (firmware['downgrade']['check'][0]['drive'] == item['productID'].strip() and
+ firmware['downgrade']['check'][0]['version'] == item['softwareVersion']) or
+ (firmware['downgrade']['check'][1]['drive'] == item['productID'].strip() and
+ firmware['downgrade']['check'][1]['version'] == item['softwareVersion']) }}"
+ msg: "Drive firmware failed to update all drives"
+ loop: "{{ lookup('list', current_drive_firmware['json']) }}"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_facts/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_facts/tasks/main.yml
new file mode 100644
index 00000000..14cc43c6
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_facts/tasks/main.yml
@@ -0,0 +1,19 @@
+# Test code for the na_santricity_facts module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+- name: Retrieve facts from SANtricity Web Services Embedded
+ na_santricity_facts:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Retrieve facts from SANtricity Web Services Proxy
+ na_santricity_facts:
+ ssid: "{{ proxy_ssid }}"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}" \ No newline at end of file
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_legacy_tests.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_legacy_tests.yml
new file mode 100644
index 00000000..6aff714c
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_legacy_tests.yml
@@ -0,0 +1,128 @@
+# Test code for the na_santricity_firmware module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# TODO: Controllers must be downgraded to RCB_11.40.3R2_280x_5c7d81b3.dlp and N280X-842834-D02.dlp before executing the integration tests, e.g.:
+# loadControllerFirmware_MT swartzn@10.113.1.250 /home/swartzn/Downloads/RCB_11.40.3R2_280x_5c7d81b3.dlp /home/swartzn/Downloads/N280X-842834-D02.dlp
+
+# This integration test will validate upgrade functionality for firmware-only, firmware-and-nvsram, and check mode.
+- name: Set credentials and other facts
+ set_fact:
+ proxy_credentials: &proxy_creds
+ ssid: "{{ proxy_legacy_ssid }}"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ path: "/home/swartzn/Downloads/"
+ upgrades:
+ - firmware: "RC_08405000_m3_e10_840_5600.dlp"
+ nvsram: "N5600-840834-D03.dlp"
+ expected_firmware_version: "08.40.50.00"
+ expected_nvsram_version: "N5600-840834-D03"
+ - firmware: "RC_08403000_m3_e10_840_5600.dlp"
+ nvsram: "N5600-840834-D03.dlp"
+ expected_firmware_version: "08.40.30.00"
+ expected_nvsram_version: "N5600-840834-D03"
+
+- name: Perform firmware upgrade using the Web Services Proxy (changed, firmware)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[1]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[1]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_legacy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_legacy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[1]['expected_firmware_version'] }}"
+ msg: "Failed to change the firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[1]['expected_nvsram_version'] }}"
+ msg: "Failed to change the nvsram version."
+
+- name: Perform firmware upgrade using the Web Services Proxy (check_mode, changed, firmware)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+ check_mode: true
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_legacy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_legacy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[1]['expected_firmware_version'] }}"
+ msg: "Failed to change the firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[1]['expected_nvsram_version'] }}"
+ msg: "Failed to change the nvsram version."
+
+- name: Perform firmware upgrade using the Web Services Proxy (changed, firmware)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_legacy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_legacy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Failed to change the firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "Failed to change the nvsram version."
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_tests.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_tests.yml
new file mode 100644
index 00000000..99827e1b
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_tests.yml
@@ -0,0 +1,320 @@
+# Test code for the na_santricity_firmware module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# TODO: MUST BE DOWNGRADED BEFORE EXECUTING INTEGRATION TESTS TO RCB_11.40.3R2_280x_5c7d81b3.dlp and N280X-842834-D02.dlp
+# loadControllerFirmware_MT swartzn@10.113.1.250 /home/swartzn/Downloads/RCB_11.40.3R2_280x_5c7d81b3.dlp /home/swartzn/Downloads/N280X-842834-D02.dlp
+
+# This integration test will validate upgrade functionality for firmware-only, firmware-and-nvsram, and check mode.
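+# In check mode the module only reports whether an upgrade would occur; the
+# uri tasks that follow each run confirm the versions actually on the array.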
+- name: Set credentials and other facts
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ proxy_credentials: &proxy_creds
+ ssid: "{{ proxy_ssid }}"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ path: "/home/swartzn/Downloads/"
+ upgrades:
+ - firmware: "RCB_11.40.3R2_280x_5c7d81b3.dlp"
+ nvsram: "N280X-842834-D02.dlp"
+ expected_firmware_version: "08.42.30.05"
+ expected_nvsram_version: "N280X-842834-D02"
+ - firmware: "RCB_11.40.5_280x_5ceef00e.dlp"
+ nvsram: "N280X-842834-D02.dlp"
+ expected_firmware_version: "08.42.50.00"
+ expected_nvsram_version: "N280X-842834-D02"
+ - firmware: "RCB_11.50.2_280x_5ce8501f.dlp"
+ nvsram: "N280X-852834-D02.dlp"
+ expected_firmware_version: "08.52.00.00"
+ expected_nvsram_version: "N280X-852834-D02"
+
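+# Firmware and NVSRAM versions are read back through the REST graph endpoint
+# (graph/xpath-filter?query=/sa/saData/fwVersion), which returns a JSON array
+# whose first element is the active version string.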
+- name: Perform firmware upgrade using the Web Services REST API (check_mode, no change, firmware only)
+ na_santricity_firmware:
+ <<: *creds
+ nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ check_mode: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == False }}"
+ msg: "Failed to return unchanged."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Unexpected firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "Unexpected nvsram version."
+
+- name: Perform firmware upgrade using the Web Services REST API (no change, firmware only)
+ na_santricity_firmware:
+ <<: *creds
+ nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == False }}"
+ msg: "Failed to return unchanged."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Unexpected firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "Unexpected nvsram version."
+
+- name: Perform firmware upgrade using the Web Services REST API (check_mode, change, firmware)
+ na_santricity_firmware:
+ <<: *creds
+ nvsram: "{{ path }}{{ upgrades[1]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[1]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+ check_mode: true
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Unexpected firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "Unexpected nvsram version."
+
+- name: Perform firmware upgrade using the Web Services REST API (change, firmware)
+ na_santricity_firmware:
+ <<: *creds
+ nvsram: "{{ path }}{{ upgrades[1]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[1]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[1]['expected_firmware_version'] }}"
+ msg: "Unexpected firmware version. {{ current_firmware['json'][0] }} != {{ upgrades[1]['expected_firmware_version'] }}"
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[1]['expected_nvsram_version'] }}"
+ msg: "Unexpected nvsram version. {{ current_nvsram['json'][0] }} != {{ upgrades[1]['expected_nvsram_version'] }}"
+
+- name: Perform firmware upgrade using the Web Services Proxy (changed, firmware)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Failed to change the firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "Failed to change the nvsram version."
+
+- name: Perform firmware upgrade using the Web Services Proxy (check_mode, unchanged, firmware)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ check_mode: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == False }}"
+ msg: "Failed to return unchanged."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Unexpected firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "Unexpected nvsram version."
+
+- name: Perform firmware upgrade using the Web Services Proxy (check_mode, change, firmware and nvsram)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[2]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[2]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ check_mode: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Firmware version should not have changed in check mode."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "NVSRAM version should not have changed in check mode."
+
+- name: Perform firmware upgrade using the Web Services Proxy (changed, firmware and nvsram)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[2]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[2]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[2]['expected_firmware_version'] }}"
+ msg: "Failed to change the firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[2]['expected_nvsram_version'] }}"
+ msg: "Failed to change the nvsram version."
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/main.yml
new file mode 100644
index 00000000..15edc520
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/main.yml
@@ -0,0 +1,2 @@
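+# firmware_tests.yml runs the upgrade cases against the array's own REST API
+# and through the proxy; firmware_legacy_tests.yml repeats the proxy cases
+# against the legacy system registered as proxy_legacy_ssid.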
+- include_tasks: firmware_tests.yml
+- include_tasks: firmware_legacy_tests.yml
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_global/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_global/tasks/main.yml
new file mode 100644
index 00000000..9d6e6df9
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_global/tasks/main.yml
@@ -0,0 +1,185 @@
+# Test code for the na_santricity_global module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- include_vars: "../../integration_config.yml"
+
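+# Host type indices asserted below: defaultHostTypeIndex 28 corresponds to
+# 'linux dm-mp' and index 1 to 'windows'.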
+- name: Set initial global settings
+ na_santricity_global:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ name: arrayname01
+ cache_block_size: 32768
+ cache_flush_threshold: 80
+ automatic_load_balancing: disabled
+ host_connectivity_reporting: disabled
+ default_host_type: linux dm-mp
+- name: Retrieve the current array graph
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa"
+ register: graph
+- name: Validate initial global settings
+ assert:
+ that: "{{ graph['json'][0]['saData']['storageArrayLabel'] == 'arrayname01' and
+ graph['json'][0]['cache']['cacheBlkSize'] == 32768 and
+ graph['json'][0]['cache']['demandFlushThreshold'] == 80 and
+ not graph['json'][0]['autoLoadBalancingEnabled'] and
+ not graph['json'][0]['hostConnectivityReportingEnabled'] and
+ graph['json'][0]['defaultHostTypeIndex'] == 28 }}"
+ msg: "Failed to set initial global settings"
+
+- name: Repeat initial global settings
+ na_santricity_global:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ name: arrayname01
+ cache_block_size: 32768
+ cache_flush_threshold: 80
+ automatic_load_balancing: disabled
+ host_connectivity_reporting: disabled
+ default_host_type: linux dm-mp
+ register: result
+- name: Retrieve the current array graph
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa"
+ register: graph
+- name: Validate repeated global settings made no changes
+ assert:
+ that: "{{ not result.changed and
+ graph['json'][0]['saData']['storageArrayLabel'] == 'arrayname01' and
+ graph['json'][0]['cache']['cacheBlkSize'] == 32768 and
+ graph['json'][0]['cache']['demandFlushThreshold'] == 80 and
+ not graph['json'][0]['autoLoadBalancingEnabled'] and
+ not graph['json'][0]['hostConnectivityReportingEnabled'] and
+ graph['json'][0]['defaultHostTypeIndex'] == 28 }}"
+ msg: "Failed to set initial global settings"
+
+- name: Change global settings (check-mode)
+ na_santricity_global:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ name: arrayname02
+ cache_block_size: 8192
+ cache_flush_threshold: 60
+ automatic_load_balancing: disabled
+ host_connectivity_reporting: disabled
+ default_host_type: windows
+ check_mode: true
+ register: result
+- name: Retrieve the current array graph
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa"
+ register: graph
+- name: Validate global settings are unchanged after check mode
+ assert:
+ that: "{{ result.changed and
+ graph['json'][0]['saData']['storageArrayLabel'] == 'arrayname01' and
+ graph['json'][0]['cache']['cacheBlkSize'] == 32768 and
+ graph['json'][0]['cache']['demandFlushThreshold'] == 80 and
+ not graph['json'][0]['autoLoadBalancingEnabled'] and
+ not graph['json'][0]['hostConnectivityReportingEnabled'] and
+ graph['json'][0]['defaultHostTypeIndex'] == 28 }}"
+ msg: "Failed to set initial global settings"
+
+- name: Change global settings
+ na_santricity_global:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ name: arrayname02
+ cache_block_size: 8192
+ cache_flush_threshold: 60
+ automatic_load_balancing: disabled
+ host_connectivity_reporting: disabled
+ default_host_type: windows
+ register: result
+- name: Retrieve the current array graph
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa"
+ register: graph
+- name: Validate changed global settings
+ assert:
+ that: "{{ result.changed and
+ graph['json'][0]['saData']['storageArrayLabel'] == 'arrayname02' and
+ graph['json'][0]['cache']['cacheBlkSize'] == 8192 and
+ graph['json'][0]['cache']['demandFlushThreshold'] == 60 and
+ not graph['json'][0]['autoLoadBalancingEnabled'] and
+ not graph['json'][0]['hostConnectivityReportingEnabled'] and
+ graph['json'][0]['defaultHostTypeIndex'] == 1 }}"
+ msg: "Failed to set initial global settings"
+
+- name: Turn on autoload balancing which should force enable host connection reporting
+ na_santricity_global:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ automatic_load_balancing: enabled
+ register: result
+- name: Retrieve the current array graph
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa"
+ register: graph
+- name: Validate automatic load balancing and host connectivity reporting are enabled
+ assert:
+ that: "{{ result.changed and
+ graph['json'][0]['saData']['storageArrayLabel'] == 'arrayname02' and
+ graph['json'][0]['cache']['cacheBlkSize'] == 8192 and
+ graph['json'][0]['cache']['demandFlushThreshold'] == 60 and
+ graph['json'][0]['autoLoadBalancingEnabled'] and
+ graph['json'][0]['hostConnectivityReportingEnabled'] and
+ graph['json'][0]['defaultHostTypeIndex'] == 1 }}"
+ msg: "Failed to set initial global settings"
+
+- name: Change array name only
+ na_santricity_global:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ name: arrayname03
+ register: result
+- name: Retrieve the current array graph
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa"
+ register: graph
+- name: Validate the array name change
+ assert:
+ that: "{{ result.changed and
+ graph['json'][0]['saData']['storageArrayLabel'] == 'arrayname03' and
+ graph['json'][0]['cache']['cacheBlkSize'] == 8192 and
+ graph['json'][0]['cache']['demandFlushThreshold'] == 60 and
+ graph['json'][0]['autoLoadBalancingEnabled'] and
+ graph['json'][0]['hostConnectivityReportingEnabled'] and
+ graph['json'][0]['defaultHostTypeIndex'] == 1 }}"
+ msg: "Failed to set initial global settings"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_host/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_host/tasks/main.yml
new file mode 100644
index 00000000..cb460a9e
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_host/tasks/main.yml
@@ -0,0 +1,243 @@
+# Test code for the na_santricity_host module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_host module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
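+# The &creds anchor and the `<<: *creds` merges below are plain YAML
+# anchor/alias syntax, resolved when the playbook is parsed, so every task
+# shares one credential block.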
+- name: Create iSCSI host
+ na_santricity_host:
+ <<: *creds
+ name: windows_iscsi_host
+ host_type: Windows
+ ports:
+ - type: iscsi
+ label: iscsi_p1
+ port: iqn.windows.host.com.1
+ - type: iscsi
+ label: iscsi_p2
+ port: iqn.windows.host.com.2
+
+- name: Create FC host
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc_host
+ host_type: Linux dm-mp
+ ports:
+ - type: fc
+ label: fc_p1
+ port: "0x1122334455667788"
+ - type: fc
+ label: fc_p2
+ port: "01:23:45:67:89:1a:bc:de"
+
+- name: Attempt to change FC host port using different port case (no change)
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc_host
+ host_type: Linux dm-mp
+ ports:
+ - type: FC
+ label: fc_p1
+ port: "0x1122334455667788"
+ - type: FC
+ label: fc_p2
+ port: "01:23:45:67:89:1A:BC:DE"
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Create iSCSI host (no change)
+ na_santricity_host:
+ <<: *creds
+ name: windows_iscsi_host
+ host_type: Windows
+ ports:
+ - type: iscsi
+ label: iscsi_p1
+ port: iqn.windows.host.com.1
+ - type: iscsi
+ label: iscsi_p2
+ port: iqn.windows.host.com.2
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Create FC host (no change)
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc_host
+ host_type: Linux dm-mp
+ ports:
+ - type: fc
+ label: fc_p1
+ port: "0x1122334455667788"
+ - type: fc
+ label: fc_p2
+ port: "01:23:45:67:89:1a:bc:de"
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Create FC host with a used port (change, check_mode)
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc2_host
+ host_type: Linux dm-mp
+ force_port: true
+ ports:
+ - type: fc
+ label: fc2_p1
+ port: "0x1122334455667788"
+ check_mode: true
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Change FC host name to mixed case (change)
+ na_santricity_host:
+ <<: *creds
+ name: Linux_FC_Host
+ host_type: Linux dm-mp
+ ports:
+ - type: fc
+ label: fc_p1
+ port: "0x1122334455667788"
+ - type: fc
+ label: fc_p2
+ port: "01:23:45:67:89:1a:bc:de"
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Change FC host port labels to uppercase (change)
+ na_santricity_host:
+ <<: *creds
+ name: Linux_FC_Host
+ host_type: Linux dm-mp
+ ports:
+ - type: fc
+ label: FC_P1
+ port: "0x1122334455667788"
+ - type: fc
+ label: FC_P2
+ port: "01:23:45:67:89:1a:bc:de"
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Create FC host with a used port (change)
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc2_host
+ host_type: Linux dm-mp
+ force_port: true
+ ports:
+ - type: fc
+ label: fc2_p1
+ port: "0x1122334455667788"
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Create FC host with a used port (no change)
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc2_host
+ host_type: Linux dm-mp
+ force_port: true
+ ports:
+ - type: fc
+ label: fc2_p1
+ port: "0x1122334455667788"
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete iSCSI host (changed)
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: windows_iscsi_host
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete FC host (changed)
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: Linux_FC_Host
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete second FC host (changed)
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: linux_fc2_host
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Delete iSCSI host (no change)
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: windows_iscsi_host
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Delete second FC host (no change)
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: Linux_FC_Host
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Delete FC host (no change)
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: linux_fc2_host
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_hostgroup/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_hostgroup/tasks/main.yml
new file mode 100644
index 00000000..8a2af77d
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_hostgroup/tasks/main.yml
@@ -0,0 +1,137 @@
+# Test code for the na_santricity_hostgroup module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_hostgroup module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Setup hosts for the groups
+ block:
+ - name: Create iSCSI host
+ na_santricity_host:
+ <<: *creds
+ name: windows_iscsi_host
+ host_type: Windows
+ ports:
+ - type: iscsi
+ label: iscsi_p1
+ port: iqn.windows.host.com.1
+ - type: iscsi
+ label: iscsi_p2
+ port: iqn.windows.host.com.2
+ - name: Create FC host
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc_host
+ host_type: Linux dm-mp
+ ports:
+ - type: fc
+ label: fc_p1
+ port: "0x1122334455667788"
+ - type: fc
+ label: fc_p2
+ port: "01:23:45:67:89:1a:bc:de"
+
+- name: Create host group and add one host (change)
+ na_santricity_hostgroup:
+ <<: *creds
+ name: hostgroup_test
+ hosts:
+ - windows_iscsi_host
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Create host group and add one host (no change)
+ na_santricity_hostgroup:
+ <<: *creds
+ name: hostgroup_test
+ hosts:
+ - windows_iscsi_host
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add one host (change, check_mode)
+ na_santricity_hostgroup:
+ <<: *creds
+ name: hostgroup_test
+ hosts:
+ - windows_iscsi_host
+ - linux_fc_host
+ register: results
+ check_mode: true
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add one host (change)
+ na_santricity_hostgroup:
+ <<: *creds
+ name: hostgroup_test
+ hosts:
+ - windows_iscsi_host
+ - linux_fc_host
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Remove one host (change)
+ na_santricity_hostgroup:
+ <<: *creds
+ name: hostgroup_test
+ hosts:
+ - linux_fc_host
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete host group (change)
+ na_santricity_hostgroup:
+ <<: *creds
+ state: absent
+ name: hostgroup_test
+
+- name: Delete hosts for the groups
+ block:
+ - name: Delete iSCSI host
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: windows_iscsi_host
+ register: results
+
+ - name: Delete FC host
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: linux_fc_host
+ register: results
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ib_iser_interface/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ib_iser_interface/tasks/main.yml
new file mode 100644
index 00000000..d2d8142b
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ib_iser_interface/tasks/main.yml
@@ -0,0 +1,88 @@
+# Test code for the na_santricity_ib_iser_interface module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_ib_iser_interface module test
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ interface_a1_ip: &a1_ip 192.168.1.101
+ interface_a2_ip: &a2_ip 192.168.2.101
+
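+# The *a1_ip and *a2_ip aliases below reuse the addresses anchored above, so
+# the initial and revert loops are guaranteed to apply identical values.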
+- name: Set the initial ib_iser interfaces
+ na_santricity_ib_iser_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ loop:
+ - ["A", "1", *a1_ip]
+ - ["B", "1", *a2_ip]
+
+- name: Repeat the initial ib_iser interfaces (no change)
+ na_santricity_ib_iser_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", *a1_ip]
+ - ["B", "1", *a2_ip]
+- name: Verify no changes were made
+ assert:
+ that: "{{ not item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}"
+
+- name: Change the initial ib_iser interfaces (changed, check_mode)
+ na_santricity_ib_iser_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", "192.168.3.230"]
+ - ["B", "1", "192.168.3.231"]
+ check_mode: true
+- name: Verify changes were reported (check_mode)
+ assert:
+ that: "{{ item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}"
+
+- name: Change the initial ib_iser interfaces (changed)
+ na_santricity_ib_iser_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", "192.168.3.230"]
+ - ["B", "1", "192.168.3.231"]
+- name: Verify changes were made
+ assert:
+ that: "{{ item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}"
+
+- name: Revert to the initial ib_iser interfaces (changed)
+ na_santricity_ib_iser_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", *a1_ip]
+ - ["B", "1", *a2_ip]
+- name: Verify changes were made
+ assert:
+ that: "{{ item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}" \ No newline at end of file
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_interface/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_interface/tasks/main.yml
new file mode 100644
index 00000000..38b6faba
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_interface/tasks/main.yml
@@ -0,0 +1,115 @@
+# Test code for the na_santricity_iscsi_interface module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_iscsi_interface module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
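+# Both controllers' port 1 interfaces are reset to DHCP first so the static
+# configuration tests below start from a known state.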
+- name: Set controller iSCSI interfaces to DHCP
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: "{{ item }}"
+ port: 1
+ config_method: dhcp
+ mtu: 1500
+ loop: ["A", "B"]
+
+- name: Set controller A iSCSI interface to static (change, check_mode)
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: A
+ port: 1
+ config_method: static
+ address: 192.168.1.100
+ subnet_mask: 255.255.255.0
+ gateway: 192.168.1.1
+ mtu: 1500
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Set controller A iSCSI interface to static (change)
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: A
+ port: 1
+ config_method: static
+ address: 192.168.1.100
+ subnet_mask: 255.255.255.0
+ gateway: 192.168.1.1
+ mtu: 1500
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Set controller A iSCSI interface to static (no change)
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: A
+ port: 1
+ config_method: static
+ address: 192.168.1.100
+ subnet_mask: 255.255.255.0
+ gateway: 192.168.1.1
+ mtu: 1500
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Set controller B iSCSI interface to static (change)
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: B
+ port: 1
+ config_method: static
+ address: 192.168.1.200
+ subnet_mask: 255.255.255.0
+ gateway: 192.168.1.1
+ mtu: 1500
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Set controller A iSCSI interface MTU to 9000 (change)
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: A
+ port: 1
+ config_method: static
+ address: 192.168.1.100
+ subnet_mask: 255.255.255.0
+ gateway: 192.168.1.1
+ mtu: 9000
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Set controller iSCSI interfaces to DHCP
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: "{{ item }}"
+ port: 1
+ config_method: dhcp
+ mtu: 1500
+ loop: ["A", "B"]
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_target/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_target/tasks/main.yml
new file mode 100644
index 00000000..b259ec87
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_target/tasks/main.yml
@@ -0,0 +1,81 @@
+# Test code for the na_santricity_iscsi_target module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_iscsi_target module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Set initial iSCSI target state
+ na_santricity_iscsi_target:
+ <<: *creds
+ name: eseries_storage_iscsi_target
+ ping: false
+ unnamed_discovery: false
+ chap_secret: "chappySecret"
+
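+# The first task establishes a known state (ping and unnamed discovery off,
+# CHAP secret set); the next task omits chap_secret, which clears it, so the
+# assertion expects a reported change.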
+- name: Clear chap secret
+ na_santricity_iscsi_target:
+ <<: *creds
+ name: eseries_storage_iscsi_target
+ ping: false
+ unnamed_discovery: false
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Make iSCSI target pingable (change, check_mode)
+ na_santricity_iscsi_target:
+ <<: *creds
+ name: eseries_storage_iscsi_target
+ ping: true
+ unnamed_discovery: false
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Make iSCSI target pingable (change)
+ na_santricity_iscsi_target:
+ <<: *creds
+ name: eseries_storage_iscsi_target
+ ping: true
+ unnamed_discovery: false
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Make iSCSI target pingable (no change)
+ na_santricity_iscsi_target:
+ <<: *creds
+ name: eseries_storage_iscsi_target
+ ping: true
+ unnamed_discovery: false
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Make iSCSI target discoverable (change)
+ na_santricity_iscsi_target:
+ <<: *creds
+ name: eseries_storage_iscsi_target
+ ping: true
+ unnamed_discovery: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ldap/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ldap/tasks/main.yml
new file mode 100644
index 00000000..b7b57df1
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ldap/tasks/main.yml
@@ -0,0 +1,104 @@
+# Test code for the na_santricity_ldap module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- include_vars: "../../integration_config.yml"
+
+- set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ ldap_info: &info
+ bind_user: "{{ bind_user }}"
+ bind_password: "{{ bind_password }}"
+ server_url: "{{ server_url }}"
+ search_base: "{{ search_base }}"
+ role_mappings:
+ - ".*":
+ - storage.admin
+ - security.admin
+ - support.admin
+ - storage.monitor
+ - ".*":
+ - storage.monitor
+
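+# role_mappings[0] grants all four roles to any matching user (".*");
+# role_mappings[1] keeps the same match but grants only storage.monitor,
+# which is what drives the later (change) assertion.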
+- name: Delete default LDAP domain
+ na_santricity_ldap:
+ <<: *creds
+ state: disabled
+
+- name: (Repeat) Delete default LDAP domain (no change)
+ na_santricity_ldap:
+ <<: *creds
+ state: disabled
+ register: results
+- name: Verify LDAP changes
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Define a default LDAP domain, utilizing defaults where possible (changed, check_mode)
+ na_santricity_ldap:
+ <<: *creds
+ <<: *info
+ state: present
+ identifier: test1
+ role_mappings: "{{ role_mappings[0] }}"
+ check_mode: true
+ register: results
+- name: Verify LDAP changes
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Define a default LDAP domain, utilizing defaults where possible (changed)
+ na_santricity_ldap:
+ <<: *creds
+ <<: *info
+ state: present
+ identifier: test1
+ role_mappings: "{{ role_mappings[0] }}"
+ register: results
+- name: Verify LDAP changes
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Define a default LDAP domain, utilizing defaults where possible (no change)
+ na_santricity_ldap:
+ <<: *creds
+ <<: *info
+ state: present
+ identifier: test1
+ role_mappings: "{{ role_mappings[0] }}"
+ register: results
+- name: Verify LDAP changes
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Redefine the default LDAP domain with different role mappings (change)
+ na_santricity_ldap:
+ <<: *creds
+ <<: *info
+ state: present
+ identifier: test1
+ role_mappings: "{{ role_mappings[1] }}"
+ register: results
+- name: Verify LDAP changes
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete default LDAP domain
+ na_santricity_ldap:
+ <<: *creds
+ state: absent
+ identifier: test1
+ register: results
+- name: Verify LDAP changes
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!" \ No newline at end of file
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_lun_mapping/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_lun_mapping/tasks/main.yml
new file mode 100644
index 00000000..37955fbd
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_lun_mapping/tasks/main.yml
@@ -0,0 +1,318 @@
+# Test code for the na_santricity_lun_mapping module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_lun_mapping module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+# ****************************************************
+# *** Setup test hosts, storage pools, and volumes ***
+# ****************************************************
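+# Mappings are verified by querying the array graph for the volume
+# (xpath-filter?query=//volume[name='...']) and checking its 'mapped' flag.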
+- name: Create host for host mapping
+ na_santricity_host:
+ <<: *creds
+ state: present
+ name: test_host_mapping_host
+ host_type: 27
+- na_santricity_host:
+ <<: *creds
+ state: present
+ name: test_host1
+ host_type: 27
+- na_santricity_host:
+ <<: *creds
+ state: present
+ name: test_host2
+ host_type: 27
+- name: Create storage pool for host mapping
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: test_host_mapping_storage_pool
+ raid_level: raid0
+ criteria_min_usable_capacity: 1
+- name: Create volume for host mapping
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: test_host_mapping_volume
+ storage_pool_name: test_host_mapping_storage_pool
+ size: 1
+- name: Create volume for host mapping
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: test_host_mapping_volume2
+ storage_pool_name: test_host_mapping_storage_pool
+ size: 1
+
+# **********************************************
+# *** Create new lun between host and volume ***
+# **********************************************
+- name: Create na_santricity_lun_mapping
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume
+ register: result
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ item['mapped'] }}"
+ msg: "Lun failed to be created."
+ loop: "{{ lookup('list', current.json)}}"
+
+# QUICK VERIFICATION OF MISMATCHING TARGET/TARGET_TYPE - GOOD
+#- name: Create na_santricity_lun_mapping
+# na_santricity_lun_mapping:
+# <<: *creds
+# state: present
+# target: test_host_mapping_host
+# volume: test_host_mapping_volume
+# lun: 100
+# target_type: group
+# register: result
+#
+#- pause: seconds=30
+# **************************************************************
+# *** Repeat previous lun creation play and verify unchanged ***
+# **************************************************************
+- name: Repeat lun creation
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume
+ register: result
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ item['mapped'] and result.changed==False }}"
+ msg: "Lun failed to be unchanged."
+ loop: "{{ lookup('list', current.json)}}"
+
+# ****************************************************************
+# *** Move existing lun to default target and verify unchanged ***
+# ****************************************************************
+- name: Move lun to default target
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ volume: test_host_mapping_volume
+ register: result
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ item['mapped'] }}"
+ msg: "Lun failed to be created."
+ loop: "{{ lookup('list', current.json)}}"
+
+# *****************************************************************
+# *** Move existing lun to specific target and verify unchanged ***
+# *****************************************************************
+- name: Move lun to default target
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume
+ register: result
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ item['mapped'] }}"
+ msg: "Lun failed to be created."
+ loop: "{{ lookup('list', current.json)}}"
+
+# *******************************************
+# *** Modify a volume mapping's lun value ***
+# *******************************************
+- name: Change volume mapping's lun value
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume
+ lun: 100
+ register: result
+
+- pause: seconds=15
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ result.changed }}"
+ msg: "Lun failed to be unchanged."
+ loop: "{{ lookup('list', current.json)}}"
+
+- name: Verify mapping fails when lun already in use on existing host object
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume2
+ lun: 100
+ register: result
+ ignore_errors: True
+
+- pause: seconds=15
+
+- assert:
+ that: "{{ not result.changed }}"
+ msg: "Lun succeeded when it should have failed."
+ loop: "{{ lookup('list', current.json)}}"
+
+- name: Verify mapping succeeds when the same lun is used on multiple host objects.
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ target: test_host1
+ volume: test_host_mapping_volume2
+ lun: 100
+ register: result
+
+- pause: seconds=15
+
+- assert:
+ that: "{{ result.changed }}"
+ msg: "Lun failed to be unchanged."
+ loop: "{{ lookup('list', current.json)}}"
+
+# *************************************************************************************************
+# *** Verify that exact mapping details but different lun results in an unchanged configuration ***
+# *************************************************************************************************
+- name: Verify that exact mapping details but different lun results in an unchanged configuration
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: absent
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume
+ lun: 99
+ register: result
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ item['mapped'] and not result.changed }}"
+ msg: "Lun failed to be unchanged."
+ loop: "{{ lookup('list', current.json)}}"
+
+# ********************************
+# *** Delete newly created lun ***
+# ********************************
+- name: Delete lun creation
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: absent
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume
+ register: result
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ not item['mapped'] }}"
+ msg: "Lun failed to be created."
+ loop: "{{ lookup('list', current.json)}}"
+
+# ********************************************************
+# *** Tear down test hosts, storage pools, and volumes ***
+# ********************************************************
+- name: Delete volume for host mapping
+ na_santricity_volume:
+ <<: *creds
+ state: absent
+ name: test_host_mapping_volume
+ storage_pool_name: test_host_mapping_storage_pool
+ size: 1
+- name: Delete volume for host mapping
+ na_santricity_volume:
+ <<: *creds
+ state: absent
+ name: test_host_mapping_volume2
+ storage_pool_name: test_host_mapping_storage_pool
+ size: 1
+- name: Delete storage pool for host mapping
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: test_host_mapping_storage_pool
+ raid_level: raid0
+ criteria_min_usable_capacity: 1
+- name: Delete host for host mapping
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: test_host_mapping_host
+ host_type_index: 27
+- name: Delete host for host mapping
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: test_host2
+ host_type_index: 27
+- name: Delete host for host mapping
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: test_host1
+ host_type_index: 27
\ No newline at end of file
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_mgmt_interface/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_mgmt_interface/tasks/main.yml
new file mode 100644
index 00000000..15aebf4f
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_mgmt_interface/tasks/main.yml
@@ -0,0 +1,383 @@
+# Test code for the na_santricity_mgmt_interface module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# Note: Ensure controller B has IPv6 enabled, otherwise the first task will fail.
+
+- include_vars: "../../integration_config.yml"
+
+- set_fact:
+ controller_a: '070000000000000000000001'
+ controller_b: '070000000000000000000002'
+ original_channel_a1_info: &channel_a1_info
+ state: enabled
+ address: 10.113.1.192
+ subnet_mask: 255.255.255.0
+ gateway: 10.113.1.1
+ config_method: static
+ dns_config_method: static
+ dns_address: 10.193.0.250
+ dns_address_backup: 10.192.0.250
+ ntp_config_method: static
+ ntp_address: 216.239.35.0
+ ntp_address_backup: 216.239.35.4
+ ssh: true
+ original_channel_b1_info: &channel_b1_info
+ state: enabled
+ address: 10.113.1.193
+ subnet_mask: 255.255.255.0
+ gateway: 10.113.1.1
+ config_method: static
+ dns_config_method: static
+ dns_address: 10.193.0.250
+ dns_address_backup: 10.192.0.250
+ ntp_config_method: static
+ ntp_address: 216.239.35.0
+ ntp_address_backup: 216.239.35.4
+ ssh: true
+ address_info_list: &test_info
+ address: 10.113.1.251
+ subnet_mask: 255.255.255.0
+ gateway: 10.113.1.1
+
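+# Changing controller A port 1 can move the address that base_url points at,
+# so later tasks use base_url.replace(...) to reach the array through
+# controller B's management address instead.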
+- name: Set controller A port 1 to dhcp
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ controller: A
+ port: "1"
+ config_method: dhcp
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url.replace(original_channel_a1_info['address'], original_channel_b1_info['address']) }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller A port 1 is set to dhcp
+ assert:
+ that: "{{ (item['controllerRef'] != controller_a or item['channel'] != 1) or item['ipv4AddressConfigMethod'] == 'configDhcp' }}"
+ msg: "Failed to set controller A port 1 to dhcp!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Restore controller A port 1 to static
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url.replace(original_channel_a1_info['address'], original_channel_b1_info['address']) }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ controller: A
+ port: "1"
+ <<: *channel_a1_info
+
+- name: Disable controller B port 1
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "disabled"
+ port: "1"
+ controller: B
+
+- name: Set controller B port 1 to dhcp
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: dhcp
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 is set to dhcp
+ assert:
+ that: "{{ (item['controllerRef'] != controller_b or item['channel'] != 1) or item['ipv4AddressConfigMethod'] == 'configDhcp' }}"
+ msg: "Failed to set controller B port 1 to dhcp!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Set controller B port 1 to static ip address (changed, check_mode)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ check_mode: true
+ register: result
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 is still set to DHCP (check mode)
+ assert:
+ that: "{{ result['changed'] and
+ ((item['controllerRef'] != controller_b or item['channel'] != 1) or
+ item['ipv4AddressConfigMethod'] == 'configDhcp') }}"
+ msg: "Failed to set controller B port 1 to static ip address!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Set controller B port 1 to static ip address (changed)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ register: result
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 is set to static ip address
+ assert:
+ that: "{{ result['changed'] and
+ ((item['controllerRef'] != controller_b or item['channel'] != 1) or
+ (item['ipv4AddressConfigMethod'] == 'configStatic' and
+ item['ipv4Address'] == address_info_list['address'] and
+ item['ipv4SubnetMask'] == address_info_list['subnet_mask'] and
+ item['ipv4GatewayAddress'] == address_info_list['gateway'])) }}"
+ msg: "Failed to set controller B port 1 to static ip address!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Set controller B port 1 dns setting to dhcp
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ dns_config_method: dhcp
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 is set to dhcp
+ assert:
+ that: "{{ ((item['controllerRef'] != controller_b or item['channel'] != 1) or
+ item['dnsProperties']['acquisitionProperties']['dnsAcquisitionType'] == 'dhcp') }}"
+ msg: "Failed to set controller B port 1 dns setting to dhcp!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Set controller B port 1 dns setting to static (changed)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ dns_config_method: static
+ dns_address: 192.168.1.1
+ dns_address_backup: 192.168.1.2
+ register: result
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 dns is set to static
+ assert:
+ that: "{{ result['changed'] and
+ ((item['controllerRef'] != controller_b or item['channel'] != 1) or
+ (item['dnsProperties']['acquisitionProperties']['dnsAcquisitionType'] == 'stat') and
+ item['dnsProperties']['acquisitionProperties']['dnsServers'][0]['addressType'] == 'ipv4' and
+ item['dnsProperties']['acquisitionProperties']['dnsServers'][0]['ipv4Address'] == '192.168.1.1' and
+ item['dnsProperties']['acquisitionProperties']['dnsServers'][1]['addressType'] == 'ipv4' and
+ item['dnsProperties']['acquisitionProperties']['dnsServers'][1]['ipv4Address'] == '192.168.1.2') }}"
+ msg: "Failed to set controller B port 1 dns setting to static!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
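+# Note: the assertions for static DNS/NTP configuration above and below compare
+# the acquisition type against the abbreviated enum value 'stat', which is the
+# value the controller API reports for static configuration (not 'static').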
+- name: Disable controller B port 1 ntp setting (changed)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ ntp_config_method: disabled
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 ntp is disabled
+ assert:
+ that: "{{ (item['controllerRef'] != controller_b or item['channel'] != 1) or
+ item['ntpProperties']['acquisitionProperties']['ntpAcquisitionType'] == 'disabled' }}"
+ msg: "Failed to disable controller B port 1 ntp!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Set controller B port 1 ntp setting to dhcp (changed)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ ntp_config_method: dhcp
+ register: result
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 ntp is set to dhcp
+ assert:
+ that: "{{ result['changed'] and
+ ((item['controllerRef'] != controller_b or item['channel'] != 1) or
+ item['ntpProperties']['acquisitionProperties']['ntpAcquisitionType'] == 'dhcp') }}"
+ msg: "Failed to set controller B port 1 ntp setting to dhcp!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Set controller B port 1 ntp setting to static (changed)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ ntp_config_method: static
+ ntp_address: 192.168.1.1
+ ntp_address_backup: 192.168.1.2
+ register: result
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 ntp is set to static
+ assert:
+ that: "{{ result['changed'] and
+ ((item['controllerRef'] != controller_b or item['channel'] != 1) or
+ (item['ntpProperties']['acquisitionProperties']['ntpAcquisitionType'] == 'stat') and
+ item['ntpProperties']['acquisitionProperties']['ntpServers'][0]['addrType'] == 'ipvx' and
+ item['ntpProperties']['acquisitionProperties']['ntpServers'][0]['ipvxAddress']['addressType'] == 'ipv4' and
+ item['ntpProperties']['acquisitionProperties']['ntpServers'][0]['ipvxAddress']['ipv4Address'] == '192.168.1.1' and
+ item['ntpProperties']['acquisitionProperties']['ntpServers'][1]['addrType'] == 'ipvx' and
+ item['ntpProperties']['acquisitionProperties']['ntpServers'][1]['ipvxAddress']['addressType'] == 'ipv4' and
+ item['ntpProperties']['acquisitionProperties']['ntpServers'][1]['ipvxAddress']['ipv4Address'] == '192.168.1.2') }}"
+ msg: "Failed to set controller B port 1 ntp setting to static!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Disable controller B ssh
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ port: "1"
+ controller: B
+ ssh: false
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/controllers"
+ register: controllers
+- name: Validate controller B ssh is disabled
+ assert:
+ that: "{{ item['controllerRef'] != controller_b or not item['networkSettings']['remoteAccessEnabled'] }}"
+ msg: "Failed to disable controller B ssh!"
+ loop: "{{ lookup('list', controllers['json']) }}"
+
+- name: Enable controller B ssh (changed)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ port: "1"
+ controller: B
+ ssh: true
+ register: result
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/controllers"
+ register: controllers
+- name: Validate controller B ssh is enabled
+ assert:
+ that: "{{ result['changed'] and (item['controllerRef'] != controller_b or item['networkSettings']['remoteAccessEnabled']) }}"
+ msg: "Failed to set controller B port 1 ntp setting to static!"
+ loop: "{{ lookup('list', controllers['json']) }}"
+
+- name: Restore controller B port 1 settings
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ port: "1"
+ controller: B
+ <<: *channel_b1_info
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/ib.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/ib.yml
new file mode 100644
index 00000000..260f3d7f
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/ib.yml
@@ -0,0 +1,88 @@
+# Test code for the na_santricity_nvme_interface module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_nvme_interface module test
+ set_fact:
+ credentials: &creds
+ ssid: 1
+ api_url: https://192.168.1.100:8443/devmgr/v2/
+ api_username: admin
+ api_password: adminpassword
+ validate_certs: false
+ interface_a1_ip: 192.168.1.1
+ interface_b1_ip: 192.168.2.1
+
+- name: Set the initial nvme interfaces
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ loop:
+ - ["A", "1", "{{ interface_a1_ip }}"]
+ - ["B", "1", "{{ interface_b1_ip }}"]
+
+- name: Repeat the initial nvme interface configuration (no change)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", "{{ interface_a1_ip }}"]
+ - ["B", "1", "{{ interface_b1_ip }}"]
+- name: Verify no changes were made
+ assert:
+ that: "{{ not item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}"
+
+- name: Change the initial nvme interfaces (changed, check_mode)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", "192.168.3.230"]
+ - ["B", "1", "192.168.3.231"]
+ check_mode: true
+- name: Verify changes were detected (check_mode)
+ assert:
+ that: "{{ item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}"
+
+- name: Change the initial nvme interfaces (changed)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", "192.168.3.230"]
+ - ["B", "1", "192.168.3.231"]
+- name: Verify changes were made
+ assert:
+ that: "{{ item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}"
+
+- name: Revert to the initial nvme interfaces (changed)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", "{{ interface_a1_ip }}"]
+ - ["B", "1", "{{ interface_b1_ip }}"]
+- name: Verify changes were made
+ assert:
+ that: "{{ item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}" \ No newline at end of file
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/main.yml
new file mode 100644
index 00000000..82f5ba16
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/main.yml
@@ -0,0 +1,2 @@
+- include_tasks: ib.yml
+- include_tasks: roce.yml
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/roce.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/roce.yml
new file mode 100644
index 00000000..70bfe55d
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/roce.yml
@@ -0,0 +1,105 @@
+# Test code for the na_santricity_nvme_interface module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_nvme_interface module test
+ set_fact:
+ credentials: &creds
+ ssid: 1
+ api_url: https://192.168.1.100:8443/devmgr/v2/
+ api_username: admin
+ api_password: adminpassword
+ validate_certs: false
+ original_interface: &iface
+ address: 192.168.131.101
+ subnet_mask: 255.255.255.0
+ gateway: 0.0.0.0
+
+- name: Ensure NVMeoF interfaces are properly configured.
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: A
+ channel: 1
+ config_method: dhcp
+ mtu: 9000
+ speed: 25
+
+- name: Ensure NVMeoF interfaces are properly configured (no change).
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: A
+ channel: 1
+ config_method: dhcp
+ mtu: 9000
+ speed: 25
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Ensure NVMe interfaces are properly configured. (change, check_mode)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: A
+ channel: 1
+ config_method: static
+ address: 192.168.130.200
+ subnet_mask: 255.255.254.0
+ gateway: 192.168.130.1
+ mtu: 1500
+ speed: auto
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Ensure NVMe interfaces are properly configured. (change)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: A
+ channel: 1
+ config_method: static
+ address: 192.168.130.200
+ subnet_mask: 255.255.254.0
+ gateway: 192.168.130.1
+ mtu: 1500
+ speed: auto
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Ensure NVMe interfaces are properly configured. (no change)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: A
+ channel: 1
+ config_method: static
+ address: 192.168.130.200
+ subnet_mask: 255.255.254.0
+ gateway: 192.168.130.1
+ mtu: 1500
+ speed: auto
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Ensure NVMeoF interfaces are properly configured. (change)
+ na_santricity_nvme_interface:
+ <<: *creds
+ <<: *iface
+ controller: A
+ channel: 1
+ config_method: static
+ mtu: 1500
+ speed: auto
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_drive_firmware_upload/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_drive_firmware_upload/tasks/main.yml
new file mode 100644
index 00000000..c261abff
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_drive_firmware_upload/tasks/main.yml
@@ -0,0 +1,65 @@
+# Test code for the na_santricity_proxy_drive_firmware_upload module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Test na_santricity_proxy_drive_firmware_upload module
+ set_fact:
+ credentials: &creds
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ firmware:
+ - /home/swartzn/Downloads/drive firmware/D_PX04SVQ160_30603182_MS00_5600_001.dlp
+ - /home/swartzn/Downloads/drive firmware/D_PX04SVQ160_30603299_MSB6_224C_705.dlp
+
+- name: Clear any existing proxy drive firmware
+ na_santricity_proxy_drive_firmware_upload:
+ <<: *creds
+
+- name: Clear any existing proxy drive firmware (no change)
+ na_santricity_proxy_drive_firmware_upload:
+ <<: *creds
+ register: results
+- name: Verify all drive firmware has been removed
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Drive firmware exists!"
+
+- name: Add drive firmware to proxy (changed, check_mode)
+ na_santricity_proxy_drive_firmware_upload:
+ <<: *creds
+ firmware: "{{ firmware }}"
+ register: results
+ check_mode: true
+- name: Verify drive firmware would have been added (check_mode)
+  assert:
+    that: "{{ results['changed'] }}"
+    msg: "Drive firmware upload should report a change (check_mode)!"
+
+- name: Add drive firmware to proxy (changed)
+ na_santricity_proxy_drive_firmware_upload:
+ <<: *creds
+ firmware: "{{ firmware }}"
+ register: results
+- name: Verify drive firmware has been added
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Drive firmware exists!"
+
+- name: Remove drive firmware from proxy (changed)
+ na_santricity_proxy_drive_firmware_upload:
+ <<: *creds
+ register: results
+- name: Verify drive firmware has been removed
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Drive firmware exists!"
+
+- name: Remove drive firmware from proxy (no change)
+ na_santricity_proxy_drive_firmware_upload:
+ <<: *creds
+ register: results
+- name: Verify drive firmware has been removed
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Drive firmware exists!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_firmware_upload/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_firmware_upload/tasks/main.yml
new file mode 100644
index 00000000..d4b9f02d
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_firmware_upload/tasks/main.yml
@@ -0,0 +1,65 @@
+# Test code for the na_santricity_proxy_firmware_upload module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Test na_santricity_proxy_firmware_upload module
+ set_fact:
+ credentials: &creds
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ firmware:
+ - /home/swartzn/Downloads/N5600-840834-D03.dlp
+ - /home/swartzn/Downloads/RC_08405000_m3_e10_840_5600.dlp
+
+- name: Clear any existing proxy firmware
+ na_santricity_proxy_firmware_upload:
+ <<: *creds
+
+- name: Clear any existing proxy firmware (no change)
+ na_santricity_proxy_firmware_upload:
+ <<: *creds
+ register: results
+- name: Verify all firmware has been removed
+  assert:
+    that: "{{ not results['changed'] }}"
+    msg: "Firmware exists!"
+
+- name: Add firmware to proxy (changed, check_mode)
+ na_santricity_proxy_firmware_upload:
+ <<: *creds
+ firmware: "{{ firmware }}"
+ register: results
+ check_mode: true
+- name: Verify firmware would have been added (check_mode)
+  assert:
+    that: "{{ results['changed'] }}"
+    msg: "Firmware upload should report a change (check_mode)!"
+
+- name: Add firmware to proxy (changed)
+ na_santricity_proxy_firmware_upload:
+ <<: *creds
+ firmware: "{{ firmware }}"
+ register: results
+- name: Verify firmware has been added
+  assert:
+    that: "{{ results['changed'] }}"
+    msg: "Firmware failed to be added!"
+
+- name: Remove firmware from proxy (changed)
+ na_santricity_proxy_firmware_upload:
+ <<: *creds
+ register: results
+- name: Verify firmware has been removed
+  assert:
+    that: "{{ results['changed'] }}"
+    msg: "Firmware exists!"
+
+- name: Remove firmware from proxy (no change)
+ na_santricity_proxy_firmware_upload:
+ <<: *creds
+ register: results
+- name: Verify firmware has been removed
+  assert:
+    that: "{{ not results['changed'] }}"
+    msg: "Firmware exists!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_systems/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_systems/tasks/main.yml
new file mode 100644
index 00000000..1475cda9
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_systems/tasks/main.yml
@@ -0,0 +1,160 @@
+# Test code for the na_santricity_proxy_systems module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# NOTE: Running this test back-to-back can result in a 10 minute lock-out
+
+- name: Test na_santricity_proxy_systems module
+ set_fact:
+ credentials: &creds
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ subnet: 192.168.1.10/24
+ small_subnet: 192.168.1.10/31 # Be sure to know the systems included in this subnet since they will be discovered and not specified.
+ systems:
+ - ssid: "10"
+ serial: "021633035190"
+ password: "password"
+ - ssid: "20"
+ serial: "711214000794"
+ password: "password"
+
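+# Note: a /31 subnet spans exactly two host addresses (RFC 3021), so
+# 192.168.1.10/31 covers only 192.168.1.10 and 192.168.1.11; discovery against
+# small_subnet can therefore add at most those two systems.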
+- name: Ensure no systems have been added.
+ na_santricity_proxy_systems:
+ <<: *creds
+
+- name: Add multiple systems using serial numbers and a common password (change, check_mode)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ subnet }}"
+ password: "{{ systems[0]['password'] }}"
+ systems: |-
+ {%- set output=[] %}
+ {%- for system in systems %}
+ {%- if output.append({"serial": system["serial"]}) %}{%- endif %}
+ {%- endfor %}
+ {{ output }}
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Add multiple systems using serial numbers and a common password (change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ subnet }}"
+ password: "{{ systems[0]['password'] }}"
+ systems: |-
+ {%- set output=[] %}
+ {%- for system in systems %}
+ {%- if output.append({"serial": system["serial"]}) %}{%- endif %}
+ {%- endfor %}
+ {{ output }}
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Add multiple systems using serial numbers and a common password (no change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ subnet }}"
+ password: "{{ systems[0]['password'] }}"
+ systems: |-
+ {%- set output=[] %}
+ {%- for system in systems %}
+ {%- if output.append({"serial": system["serial"]}) %}{%- endif %}
+ {%- endfor %}
+ {{ output }}
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Remove all systems. (change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add multiple systems using serial numbers (change, check_mode)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ subnet }}"
+ systems: "{{ systems }}"
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add multiple systems using serial numbers (change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ subnet }}"
+ systems: "{{ systems }}"
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add multiple systems using serial numbers (no change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ subnet }}"
+ systems: "{{ systems }}"
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Remove all systems. (change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add any other available system on the subnet (change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ small_subnet }}"
+ add_discovered_systems: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Remove all systems. (change, check_mode)
+ na_santricity_proxy_systems:
+ <<: *creds
+ register: results
+ check_mode: true
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Remove all systems. (change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_storagepool/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_storagepool/tasks/main.yml
new file mode 100644
index 00000000..664df595
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_storagepool/tasks/main.yml
@@ -0,0 +1,1038 @@
+# Test code for the na_santricity_storagepool module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+#
+# Raid levels tested: raid0, raid1, raid5, raid6, disk pool
+# Actions covered: create w/capacity, create w/drive count, repeat create (no changes), extend w/capacity,
+# extend w/drive count, delete, migrate raid levels (raid0->raid6, 1->5, 5->1, 6->0),
+# secure pool for raid0, erasing drives on creation, erasing drives on deletion,
+#                  setting reserve drive count for ddp.
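+#
+# The byte thresholds asserted below follow from the requested capacities in
+# binary units: gb is 2^30 bytes and tb is 2^40 bytes. For example,
+# 1400 gb = 1400 * 1073741824 = 1503238553600 bytes and
+# 3.4 tb = 3.4 * 1099511627776 = 3738339534438 bytes (truncated).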
+
+- name: Set facts for na_santricity_storagepool module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+# Ensure that test starts without storage pools
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ erase_secured_drives: yes
+ name: "{{ item }}"
+ loop:
+ - raid0_storage
+ - raid1_storage
+ - raid5_storage
+ - raid6_storage
+ - raidDiskPool_storage
+
+# Raid0
+# Create, rerun, extend, and modify raid level.
+- name: Create simple storage pool using raid0.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_min_usable_capacity: 1400
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 1503238553600 }}"
+ msg: "raid0 storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid0_storage`]') }}"
+
+- name: (Repeat) Create simple storage pool using raid0.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_min_usable_capacity: 1400
+ criteria_size_unit: gb
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was not modified
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ not results.changed and item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 1503238553600 }}"
+ msg: "raid0 storage pool failed not to be modified."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid0_storage`]') }}"
+
+- name: Extend storage pool to 2400gb minimum usable capacity.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_min_usable_capacity: 2400
+ criteria_size_unit: gb
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 2576980377600 }}"
+ msg: "raid0 storage pool using raid0 failed to be extended to a minimum of 2400gb."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid0_storage`]') }}"
+
+- name: Expand simple storage pool using raid0.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_drive_count: 6
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was expanded
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid0' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid0 storage pool failed to be extended to 6 drives."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
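+# count_query (used here and throughout this file) is a JMESPath filter
+# expression: it selects the drives whose currentVolumeGroupRef matches the
+# pool returned by the module, so the length of the result equals the number
+# of drives in the pool.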
+
+- name: Migrate raid0 storage pool to raid6.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_drive_count: 6
+ raid_level: raid6
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was migrated
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid6' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid0 storage pool failed to migrate to raid6"
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - raid0_storage
+
+
+# Raid1
+# Create, rerun, extend, and modify raid level.
+- name: Create simple storage pool using raid1.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid1_storage
+ criteria_min_usable_capacity: 1400
+ criteria_size_unit: gb
+ raid_level: raid1
+ register: results
+- pause: seconds=5
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid1' and (item.totalRaidedSpace | int) >= 1503238553600 }}"
+ msg: "raid1 storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid1_storage`]') }}"
+
+- name: (Repeat) Create simple storage pool using raid1.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid1_storage
+ criteria_min_usable_capacity: 1400
+ criteria_size_unit: gb
+ raid_level: raid1
+ register: results
+- pause: seconds=5
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ not results.changed and item.raidLevel == 'raid1' and (item.totalRaidedSpace | int) >= 1503238553600 }}"
+ msg: "raid1 storage pool failed not to be modified."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid1_storage`]') }}"
+
+- name: Expand simple storage pool using raid1.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid1_storage
+ criteria_drive_count: 6
+ raid_level: raid1
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was expanded
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid1' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid1 storage pool failed to be extended."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Migrate raid1 storage pool to raid5
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid1_storage
+ criteria_drive_count: 6
+ raid_level: raid5
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was migrated
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid5' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid1 storage pool failed to migrate to raid5."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - raid1_storage
+
+
+# Raid5
+# Create, rerun, extend, and modify raid level.
+- name: Create simple storage pool using raid5.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid5_storage
+ criteria_drive_count: 6
+ raid_level: raid5
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid5' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid5 storage pool failed to be created."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: (Rerun) Create simple storage pool using raid5.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid5_storage
+ criteria_drive_count: 6
+ raid_level: raid5
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was not modified
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ not results.changed and results.raidLevel == 'raid5' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid5 storage pool failed not to be modified."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Expand simple storage pool using raid5.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid5_storage
+ criteria_drive_count: 8
+ raid_level: raid5
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was expanded
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid5' and
+ (current_drives.json | json_query(count_query) | length) == 8}}"
+ msg: "raid5 storage pool failed to be modified to 8 drives."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Migrate raid5 storage pool to raid1
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid5_storage
+ criteria_drive_count: 8
+ raid_level: raid1
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was migrated
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid1' and
+ (current_drives.json | json_query(count_query) | length) == 8}}"
+ msg: "raid5 storage pool failed to migrate to raid1."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - raid5_storage
+
+
+# raid6
+# Create, rerun, extend, and modify raid level.
+- name: Create simple storage pool using raid6.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid6_storage
+ criteria_drive_count: 5
+ raid_level: raid6
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid6' and
+ (current_drives.json | json_query(count_query) | length) == 5}}"
+ msg: "raid6 storage pool failed to be created with 5 drives."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Extend simple storage pool using raid6.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid6_storage
+ criteria_min_usable_capacity: 3.4
+ criteria_size_unit: tb
+ raid_level: raid6
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid6' and (item.totalRaidedSpace | int) >= 3738339534438 }}"
+ msg: "raid6 storage pool failed to be extended to a minimum of 3.4tb."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid6_storage`]') }}"
+
+- name: Migrate raid6 storage pool to raid0
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid6_storage
+ criteria_min_usable_capacity: 3.4
+ criteria_size_unit: tb
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was migrated
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 3738339534438 }}"
+ msg: "raid6 storage pool failed to migrate to raid0."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid6_storage`]') }}"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - raid6_storage
+
+# raidDiskPool
+# Create, rerun, extend, and modify raid level.
+- name: Create simple storage pool using raidDiskPool.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 2300
+ criteria_size_unit: gb
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and (item.totalRaidedSpace | int) >= 2469606195200 }}"
+ msg: "Simple storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Rerun simple storage pool creation.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 2300
+ criteria_size_unit: gb
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was not modified
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ not results.changed and item.raidLevel == 'raidDiskPool' and (item.totalRaidedSpace | int) >= 2469606195200 }}"
+ msg: "Simple storage pool failed not to be modified."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Extend simple storage pool to a minimum usable capacity of 3000gb
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 3000
+ criteria_size_unit: gb
+ raid_level: raidDiskPool
+ register: results
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and (item.totalRaidedSpace | int) >= 3221225472000 }}"
+ msg: "Simple storage pool failed to be extended."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Extend simple storage pool.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_drive_count: 12
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raidDiskPool' and
+ (current_drives.json | json_query(count_query) | length) == 12}}"
+ msg: "raidDiskPool storage pool failed to be extended with 12 drives."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: raidDiskPool_storage
+ register: results
+
+
+# raid0 secured
+- name: Create simple storage pool using raid0.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_min_usable_capacity: 1400
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 1503238553600 and
+ item.securityType == 'enabled' }}"
+ msg: "raid0 storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid0_storage`]') }}"
+
+- name: (Repeat) Create simple storage pool using raid0.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_min_usable_capacity: 1400
+ criteria_size_unit: gb
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was not modified
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ not results.changed and item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 1503238553600 and
+ item.securityType == 'enabled' }}"
+ msg: "raid0 storage pool failed not to be modified."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid0_storage`]') }}"
+
+- name: Extend storage pool to 2400gb minimum usable capacity.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_min_usable_capacity: 2400
+ criteria_size_unit: gb
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 2576980377600 and
+ item.securityType == 'enabled' }}"
+ msg: "raid0 storage pool using raid0 failed to be extended to a minimum of 2400gb."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid0_storage`]') }}"
+
+- name: Expand simple storage pool using raid0.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_drive_count: 6
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was expanded
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid0' and results.securityType == 'enabled' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid0 storage pool failed to be extended to 6 drives."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Migrate raid0 storage pool to raid6.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_drive_count: 6
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raid6
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was migrated
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid6' and results.securityType == 'enabled' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid0 storage pool failed to migrate to raid6"
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ erase_secured_drives: yes
+ loop:
+ - raid0_storage
+
+
+# raidDiskPool secured
+- name: Create simple storage pool using raidDiskPool.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 2300
+ criteria_size_unit: gb
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and (item.totalRaidedSpace | int) >= 2469606195200 and
+ item.securityType == 'enabled' }}"
+ msg: "Simple storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Rerun simple storage pool creation.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 2300
+ criteria_size_unit: gb
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was not modified
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ not results.changed and item.raidLevel == 'raidDiskPool' and (item.totalRaidedSpace | int) >= 2469606195200 and
+ item.securityType == 'enabled' }}"
+ msg: "Simple storage pool failed not to be modified."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Extend simple storage pool to a minimum usable capacity of 3000gb
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 3000
+ criteria_size_unit: gb
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and (item.totalRaidedSpace | int) >= 3221225472000 and
+ item.securityType == 'enabled' }}"
+ msg: "Simple storage pool failed to be extended."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Extend simple storage pool.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_drive_count: 12
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raidDiskPool' and results.securityType == 'enabled' and
+ (current_drives.json | json_query(count_query) | length) == 12 }}"
+ msg: "raidDiskPool storage pool failed to be extended with 12 drives."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: raidDiskPool_storage
+ register: results
+
+
+# raidDiskPool set reserve drive count
+- name: Create simple storage pool using raidDiskPool.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_drive_count: 11
+ reserve_drive_count: 1
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and
+ item.volumeGroupData.diskPoolData.reconstructionReservedDriveCount == 1 and
+ item.securityType == 'enabled' }}"
+ msg: "Simple storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Change disk pool reserve drive count.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_drive_count: 12
+ reserve_drive_count: 2
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=30
+- name: Verify disk pool reserve drive count was changed
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and
+ item.volumeGroupData.diskPoolData.reconstructionReservedDriveCount == 2 and
+ item.securityType == 'enabled' }}"
+ msg: "Simple storage pool failed not to be modified."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+# erase drives on storage pool deletion
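+# When state is absent, erase_secured_drives requests that the pool's secured drives also be
+# sanitized on deletion so they can be reused in an unsecured pool.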
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: raidDiskPool_storage
+ erase_secured_drives: yes
+ register: results
+
+- name: Create simple storage pool using raidDiskPool with capacity and reserve count specified.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 8000
+ criteria_size_unit: gb
+ reserve_drive_count: 2
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and
+ (item.totalRaidedSpace | int) >= 3221225472000 and
+ item.volumeGroupData.diskPoolData.reconstructionReservedDriveCount == 2 and
+ item.securityType == 'enabled' }}"
+ msg: "Simple storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Integration cleanup
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: raidDiskPool_storage
+ erase_secured_drives: yes
+ register: results
+- na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: raidDiskPool_storage
+ register: results
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_syslog/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_syslog/tasks/main.yml
new file mode 100644
index 00000000..79830c3d
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_syslog/tasks/main.yml
@@ -0,0 +1,127 @@
+# Test code for the na_santricity_syslog module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
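+# Flow: add a syslog server entry, repeat it to confirm idempotency, add entries that differ
+# by protocol, port, and address, then remove every entry that was created.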
+- name: Set facts for na_santricity_syslog module's integration test.
+  set_fact:
+    credentials: &creds
+      ssid: "{{ ssid }}"
+      api_url: "{{ base_url }}"
+      api_username: "{{ username }}"
+      api_password: "{{ password }}"
+      validate_certs: "{{ validate_cert }}"
+
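+# The '&creds' anchor above lets each task below merge the connection arguments with
+# '<<: *creds' rather than repeating them.
+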
+- name: Add initial syslog server settings (changed)
+ na_santricity_syslog:
+ <<: *creds
+ address: 192.168.1.100
+ port: 514
+ protocol: udp
+ components: ["auditLog"]
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Configure initial syslog server settings (no change)
+ na_santricity_syslog:
+ <<: *creds
+ address: 192.168.1.100
+ port: 514
+ protocol: udp
+ components: ["auditLog"]
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add another syslog server entry with a different protocol (changed)
+ na_santricity_syslog:
+ <<: *creds
+ address: 192.168.1.100
+ port: 514
+ protocol: tcp
+ components: ["auditLog"]
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add another syslog server entry with a different port (changed)
+ na_santricity_syslog:
+ <<: *creds
+ address: 192.168.1.100
+ port: 123
+ protocol: tcp
+ components: ["auditLog"]
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add another syslog server address (change, check_mode)
+ na_santricity_syslog:
+ <<: *creds
+ address: 192.168.1.200
+ port: 514
+ protocol: tcp
+ components: ["auditLog"]
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Add another syslog server address (change)
+ na_santricity_syslog:
+ <<: *creds
+ address: 192.168.1.200
+ port: 514
+ protocol: tcp
+ components: ["auditLog"]
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Disable syslog server (change)
+ na_santricity_syslog:
+ <<: *creds
+ state: absent
+ address: 192.168.1.100
+ port: 514
+ protocol: udp
+ components: ["auditLog"]
+
+- name: Disable syslog server (change)
+ na_santricity_syslog:
+ <<: *creds
+ state: absent
+ address: 192.168.1.100
+ port: 514
+ protocol: tcp
+ components: ["auditLog"]
+
+- name: Disable syslog server (change)
+ na_santricity_syslog:
+ <<: *creds
+ state: absent
+ address: 192.168.1.100
+ port: 123
+ protocol: tcp
+ components: ["auditLog"]
+
+- name: Disable syslog server (change)
+ na_santricity_syslog:
+ <<: *creds
+ state: absent
+ address: 192.168.1.200
+    port: 514
+ protocol: tcp
+ components: ["auditLog"]
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_volume/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_volume/tasks/main.yml
new file mode 100644
index 00000000..fe6d91d3
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_volume/tasks/main.yml
@@ -0,0 +1,768 @@
+# Test code for the na_santricity_volume module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_volume module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+# test setup
+- name: Delete any existing storage pools
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - storage_pool
+ - storage_pool2
+ - storage_pool3
+
+# Thick volume testing: create, delete, expand, change properties (read/write cache), and expand while changing properties.
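+# Capacity assertions below are raw byte counts with 'gb' treated as GiB:
+# 100 gb -> 107374182400, 200 gb -> 214748364800, 300 gb -> 322122547200.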
+- name: Create raid 0 storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: storage_pool
+ criteria_min_usable_capacity: 5
+ criteria_size_unit: tb
+ erase_secured_drives: yes
+ raid_level: raid0
+
+- name: Delete volume in raid 0 storage pool
+ na_santricity_volume:
+ <<: *creds
+ state: absent
+ name: volume
+
+- name: Create volume in raid 0 storage pool
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 100
+ size_unit: gb
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '107374182400' and item.segmentSize == 131072}}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+- name: Re-execute volume creation in raid 0 storage pool
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 100
+ size_unit: gb
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ not results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '107374182400' and item.segmentSize == 131072}}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+- name: Update volume size
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 200
+ size_unit: gb
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '214748364800' and item.segmentSize == 131072}}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+- pause: seconds=15
+
+- name: Update volume properties
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 200
+ size_unit: gb
+ write_cache_enable: true
+ read_cache_enable: false
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '214748364800' and item.segmentSize == 131072 and
+ not item.cacheSettings.readCacheEnable and item.cacheSettings.writeCacheEnable}}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+- name: Update volume properties and expand storage capabilities
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 300
+ size_unit: gb
+ write_cache_enable: false
+ read_cache_enable: true
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '322122547200' and item.segmentSize == 131072 and
+ item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable}}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+# Workload tagging testing: create, utilize existing (name only, name with same attributes), modify attributes
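+# A workload tag pairs a workload name with arbitrary key/value attributes. The module is
+# expected to create the workload when the name is new, reuse it when name and attributes
+# match, and rewrite its attributes when they differ; each case is checked against the
+# /workloads REST endpoint below.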
+- name: Add workload tag (change, new workload tag)
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 300
+ size_unit: gb
+ write_cache_enable: false
+ read_cache_enable: true
+ workload_name: volume_tag
+ metadata:
+ volume_tag_key: volume_tag_value
+ register: results
+- pause: seconds=15
+- name: Validate volume workload changes
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '322122547200' and item.segmentSize == 131072 and
+ item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
+ {'key': 'volumeTypeId', 'value': 'volume'} in item.metadata }}"
+ msg: "Failed to modify volume metadata!"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: workload_tags
+- assert:
+ that: "{{ item.name == 'volume_tag' and
+ {'key': 'volume_tag_key', 'value': 'volume_tag_value'} in item.workloadAttributes }}"
+ msg: "Workload tag failed to be created!"
+ loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
+ vars:
+ volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
+
+- name: Repeat add workload tag (no change)
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 300
+ size_unit: gb
+ write_cache_enable: false
+ read_cache_enable: true
+ workload_name: volume_tag
+ metadata:
+ volume_tag_key: volume_tag_value
+ register: results
+- pause: seconds=15
+- name: Validate volume workload changes
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ not results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '322122547200' and item.segmentSize == 131072 and
+ item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
+ {'key': 'volumeTypeId', 'value': 'volume'} in item.metadata }}"
+ msg: "Failed to not modify volume metadata!"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: workload_tags
+- assert:
+ that: "{{ item.name == 'volume_tag' and
+ {'key': 'volume_tag_key', 'value': 'volume_tag_value'} in item.workloadAttributes }}"
+ msg: "Workload tag failed not to be changed"
+ loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
+ vars:
+ volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
+
+- name: Apply workload tag using workload_name only (no change)
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 300
+ size_unit: gb
+ write_cache_enable: false
+ read_cache_enable: true
+ workload_name: volume_tag
+ register: results
+- pause: seconds=15
+- name: Validate volume workload changes
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ not results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '322122547200' and item.segmentSize == 131072 and
+ item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
+ {'key': 'volumeTypeId', 'value': 'volume'} in item.metadata }}"
+ msg: "Failed to not modify volume metadata!"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: workload_tags
+- assert:
+ that: "{{ item.name == 'volume_tag' and
+ {'key': 'volume_tag_key', 'value': 'volume_tag_value'} in item.workloadAttributes }}"
+ msg: "Workload tag failed to not be modified!"
+ loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
+ vars:
+ volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
+
+- name: Add workload tag (change, new attributes)
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 300
+ size_unit: gb
+ write_cache_enable: false
+ read_cache_enable: true
+ workload_name: volume_tag
+ metadata:
+ volume_tag_key2: volume_tag_value2
+ register: results
+- pause: seconds=15
+- name: Validate volume workload changes
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '322122547200' and item.segmentSize == 131072 and
+ item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
+ {'key': 'volumeTypeId', 'value': 'volume'} in item.metadata }}"
+ msg: "Failed to not modify volume metadata!"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: workload_tags
+- assert:
+ that: "{{ item.name == 'volume_tag' and
+ {'key': 'volume_tag_key2', 'value': 'volume_tag_value2'} in item.workloadAttributes }}"
+ msg: "Workload tag failed to be updated!"
+ loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
+ vars:
+ volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
+
+- name: Remove workload tag from volume (change)
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 300
+ size_unit: gb
+ write_cache_enable: false
+ read_cache_enable: true
+ register: results
+- pause: seconds=15
+- name: Validate volume workload changes
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '322122547200' and item.segmentSize == 131072 and
+ item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
+ item.metadata == []}}"
+ msg: "Failed to not modify volume metadata!"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: workload_tags
+- assert:
+ that: "{{ item.name == 'volume_tag' and
+ {'key': 'volume_tag_key2', 'value': 'volume_tag_value2'} in item.workloadAttributes }}"
+ msg: "Workload tag failed to be updated!"
+ loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
+ vars:
+ volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
+
+- name: Retrieve workload tags to delete
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: workload_tags
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads/{{ item }}"
+ method: DELETE
+ status_code: 204
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
+ vars:
+ volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`].id') }}"
+
+- name: Delete raid 0 storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: storage_pool
+
+
+# *** Thin volume testing (may not work with the simulator) ***
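+# For thin volumes, 'size' is the virtual capacity, thin_volume_repo_size the initial
+# repository size, and thin_volume_max_repo_size the repository growth quota. These map to
+# the asserted fields capacity, initialProvisionedCapacity, and provisionedCapacityQuota
+# (e.g. 131072 gb -> 140737488355328 bytes, 32 gb -> 34359738368, 1024 gb -> 1099511627776).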
+- name: Create dynamic disk pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: storage_pool
+ criteria_min_usable_capacity: 2
+ criteria_size_unit: tb
+
+- name: Create thin volume
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume
+ storage_pool_name: storage_pool
+ size: 131072
+ size_unit: gb
+ thin_provision: true
+ thin_volume_repo_size: 32
+ thin_volume_max_repo_size: 1024
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'thin_volume' and item.thinProvisioned and
+ item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
+ item.provisionedCapacityQuota == '1099511627776' and item.expansionPolicy == 'automatic' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume`]') }}"
+
+- name: (Rerun) Create thin volume
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume
+ storage_pool_name: storage_pool
+ size: 131072
+ size_unit: gb
+ thin_provision: true
+ thin_volume_repo_size: 32
+ thin_volume_max_repo_size: 1024
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ not results.changed and item.name == 'thin_volume' and item.thinProvisioned and
+ item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
+ item.provisionedCapacityQuota == '1099511627776' and item.expansionPolicy == 'automatic' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume`]') }}"
+
+
+- name: Expand thin volume's virtual size
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume
+ storage_pool_name: storage_pool
+ size: 262144
+ size_unit: gb
+ thin_provision: true
+ thin_volume_repo_size: 32
+ thin_volume_max_repo_size: 1024
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'thin_volume' and item.thinProvisioned and
+ item.capacity == '281474976710656' and item.initialProvisionedCapacity == '34359738368' and
+ item.provisionedCapacityQuota == '1099511627776' and item.expansionPolicy == 'automatic' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume`]') }}"
+
+
+- name: Expand thin volume's maximum repository size
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume
+ storage_pool_name: storage_pool
+ size: 262144
+ size_unit: gb
+ thin_provision: true
+ thin_volume_repo_size: 32
+ thin_volume_max_repo_size: 2048
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'thin_volume' and item.thinProvisioned and
+ item.capacity == '281474976710656' and item.initialProvisionedCapacity == '34359738368' and
+ item.provisionedCapacityQuota == '2199023255552' and item.expansionPolicy == 'automatic' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume`]') }}"
+
+- name: Create dynamic disk pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: storage_pool2
+ criteria_min_usable_capacity: 2
+ criteria_size_unit: tb
+- pause: seconds=15
+
+- name: Create second thin volume with manual expansion policy
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume2
+ storage_pool_name: storage_pool2
+ size_unit: gb
+ size: 131072
+ thin_provision: true
+ thin_volume_repo_size: 32
+ thin_volume_max_repo_size: 32
+ thin_volume_expansion_policy: manual
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'thin_volume2' and item.thinProvisioned and
+ item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
+ item.currentProvisionedCapacity == '34359738368' and item.expansionPolicy == 'manual' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume2`]') }}"
+
+
+- name: Expand second thin volume's repository size (manual expansion policy)
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume2
+ storage_pool_name: storage_pool2
+ size_unit: gb
+ size: 131072
+ thin_provision: true
+ thin_volume_repo_size: 288
+ thin_volume_max_repo_size: 288
+ thin_volume_expansion_policy: manual
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'thin_volume2' and item.thinProvisioned and
+ item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
+ item.currentProvisionedCapacity == '309237645312' and item.expansionPolicy == 'manual' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume2`]') }}"
+
+- name: Modify second thin volume to use automatic expansion policy
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume2
+ storage_pool_name: storage_pool2
+ size_unit: gb
+ size: 131072
+ thin_provision: true
+ thin_volume_repo_size: 288
+ thin_volume_max_repo_size: 288
+ thin_volume_expansion_policy: automatic
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'thin_volume2' and item.thinProvisioned and
+ item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
+ item.currentProvisionedCapacity == '309237645312' and item.expansionPolicy == 'automatic' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume2`]') }}"
+
+- name: Delete dynamic disk pools
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - storage_pool
+ - storage_pool2
+
+- name: Create raid 0 storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: storage_pool
+ criteria_min_usable_capacity: 5
+ criteria_size_unit: tb
+ erase_secured_drives: yes
+ raid_level: raid0
+
+# Thick volume expansion testing: wait and don't wait for operation to complete
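+# wait_for_initialization determines whether the module blocks until the expansion
+# completes; the volume's /expand endpoint reports action == 'none' once no expansion is
+# running, which is what the two tests below check for.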
+- name: Create raid 6 storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: storage_pool3
+ criteria_min_usable_capacity: 5
+ criteria_size_unit: tb
+ erase_secured_drives: yes
+ raid_level: raid6
+
+- name: Delete volume in raid 6 storage pool
+ na_santricity_volume:
+ <<: *creds
+ state: absent
+ name: volume
+
+- name: Create volume in raid 6 storage pool for expansion testing
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool3
+ size: 1
+ size_unit: gb
+ register: results
+- pause: seconds=10
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '1073741824' and item.segmentSize == 131072}}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+- name: Expand volume in raid 6 storage pool and wait for the expansion to complete
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool3
+ size: 10
+ size_unit: gb
+ wait_for_initialization: True
+ register: results
+- pause: seconds=10
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes/{{ volume[0]['id'] }}/expand"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: expansion_state
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '10737418240' and item.segmentSize == 131072 and
+ expansion_state['json']['action'] == 'none'}}"
+ msg: "Volume expansion test failed."
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+- name: Expand volume in raid 6 storage pool without waiting for the expansion to complete
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool3
+ size: 100
+ size_unit: gb
+ wait_for_initialization: False
+ register: results
+- pause: seconds=10
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes/{{ volume[0]['id'] }}/expand"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: expansion_state
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '107374182400' and item.segmentSize == 131072 and expansion_state['json']['action'] != 'none'}}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+- name: Delete raid 6 storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - storage_pool3 \ No newline at end of file