Diffstat (limited to 'ansible_collections/netapp_eseries/santricity/tests')
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/integration_config.yml  32
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts/tasks/main.yml  117
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts_syslog/tasks/main.yml  112
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_asup/tasks/main.yml  287
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auditlog/tasks/main.yml  220
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auth/tasks/main.yml  170
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_client_certificate/tasks/main.yml  55
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_discover/tasks/main.yml  64
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_drive_firmware/tasks/main.yml  185
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_facts/tasks/main.yml  19
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_legacy_tests.yml  128
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_tests.yml  320
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/main.yml  2
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_global/tasks/main.yml  185
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_host/tasks/main.yml  243
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_hostgroup/tasks/main.yml  137
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ib_iser_interface/tasks/main.yml  88
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_interface/tasks/main.yml  115
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_target/tasks/main.yml  81
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ldap/tasks/main.yml  104
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_lun_mapping/tasks/main.yml  318
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_mgmt_interface/tasks/main.yml  383
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/ib.yml  88
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/main.yml  2
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/roce.yml  105
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_drive_firmware_upload/tasks/main.yml  65
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_firmware_upload/tasks/main.yml  65
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_systems/tasks/main.yml  160
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_storagepool/tasks/main.yml  1038
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_syslog/tasks/main.yml  127
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_volume/tasks/main.yml  768
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_alerts.py  194
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_alerts_syslog.py  151
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_asup.py  318
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_auditlog.py  205
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_auth.py  488
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_client_certificate.py  373
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_discover.py  168
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_drive_firmware.py  212
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_facts.py  470
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_firmware.py  494
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_global.py  494
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_host.py  434
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_hostgroup.py  140
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_ib_iser_interface.py  159
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_iscsi_interface.py  239
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_iscsi_target.py  188
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_ldap.py  371
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_lun_mapping.py  196
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_mgmt_interface.py  513
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_nvme_interface.py  220
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_proxy_drive_firmware_upload.py  137
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_proxy_firmware_upload.py  136
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_proxy_systems.py  497
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_storagepool.py  715
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_syslog.py  128
-rw-r--r--  ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_volume.py  864
57 files changed, 14287 insertions, 0 deletions
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/integration_config.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/integration_config.yml
new file mode 100644
index 000000000..8292ee426
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/integration_config.yml
@@ -0,0 +1,32 @@
+# url and credentials - all santricity modules will use this information
+ssid: "1"
+base_url: https://192.168.1.100:8443/devmgr/v2/
+username: admin
+password: adminPass
+validate_cert: false
+
+# proxy url and credentials - modules that require special api testing will use this information
+proxy_ssid: "10"
+proxy_legacy_ssid: "20"
+proxy_base_url: https://192.168.1.200:8443/devmgr/v2/
+proxy_username: admin
+proxy_password: ""
+proxy_validate_cert: false
+
+# na_santricity_auth module variable requirements in addition to both embedded and proxy credentials
+expected_serial_with_proxy_legacy: "711214012345"
+expected_serial_with_proxy_embedded: "021633012345"
+expected_serial_without_proxy: "021628012345"
+proxy_discover_subnet: 192.168.1.0/24
+systems:
+ - ssid: 10 # should match proxy_ssid above
+ addresses: ["192.168.1.110"]
+ - ssid: 20 # should match proxy_legacy_ssid above
+ addresses: ["192.168.1.120"]
+
+
+# na_santricity_ldap module variable requirements
+#bind_user: "CN=bind_user,OU=accounts,DC=test,DC=example,DC=com"
+#bind_password: "bind_password"
+#server_url: "ldap://test.example.com:389"
+#search_base: "OU=users,DC=test,DC=example,DC=com"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts/tasks/main.yml
new file mode 100644
index 000000000..a5463ea84
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts/tasks/main.yml
@@ -0,0 +1,117 @@
+# Test code for the na_santricity_alerts module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+- name: Set credential facts for the na_santricity_alerts module test
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
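+# Note: "&creds" defines a YAML anchor for the credential block; each task below merges it with "<<: *creds" so every module call reuses the same API credentials.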
+- name: Disable alerts
+ na_santricity_alerts:
+ <<: *creds
+ state: disabled
+- name: Get the current device alerts
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/device-alerts"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ register: current_state
+- name: Determine whether the current state is expected
+ assert:
+ that: "{{ not current_state['json']['alertingEnabled'] }}"
+ msg: "Failed to disable alerts!"
+
+- name: Set the initial alerting settings (changed, check_mode)
+ na_santricity_alerts:
+ <<: *creds
+ state: enabled
+ server: mail.example.com
+ sender: noreply@example.com
+ recipients:
+ - noreply@example.com
+ register: result
+ check_mode: true
+- name: Get the current device alerts
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/device-alerts"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ register: current_state
+- name: Determine whether the current state is expected
+ assert:
+ that: "{{ result['changed'] and not current_state['json']['alertingEnabled'] }}"
+ msg: "Failed to disable alerts!"
+
+- name: Set the initial alerting settings (changed)
+ na_santricity_alerts:
+ <<: *creds
+ state: enabled
+ server: mail.example.com
+ sender: noreply@example.com
+ recipients:
+ - noreply@example.com
+ register: result
+- name: Get the current device alerts
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/device-alerts"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ register: current_state
+- name: Determine whether the current state is expected
+ assert:
+ that: "{{ result['changed'] and current_state['json']['alertingEnabled'] and
+ current_state['json']['emailServerAddress'] == 'mail.example.com' and
+ current_state['json']['emailSenderAddress'] == 'noreply@example.com' and
+ current_state['json']['recipientEmailAddresses'] == ['noreply@example.com'] }}"
+ msg: "Failed to enable alerts!"
+
+- name: Set to different alerting settings (changed)
+ na_santricity_alerts:
+ <<: *creds
+ state: enabled
+ server: mail2.example.com
+ sender: noreply2@example.com
+ recipients:
+ - noreply@example.com
+ - noreply2@example.com
+ register: result
+- name: Get the current device alerts
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/device-alerts"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ register: current_state
+- name: Determine whether the current state is expected
+ assert:
+ that: "{{ result['changed'] and current_state['json']['alertingEnabled'] and
+ current_state['json']['emailServerAddress'] == 'mail2.example.com' and
+ current_state['json']['emailSenderAddress'] == 'noreply2@example.com' and
+ (current_state['json']['recipientEmailAddresses'] == ['noreply@example.com', 'noreply2@example.com'] or
+ current_state['json']['recipientEmailAddresses'] == ['noreply2@example.com', 'noreply@example.com']) }}"
+ msg: "Failed to enable alerts!"
+
+- name: Disable alerts again (changed)
+ na_santricity_alerts:
+ <<: *creds
+ state: disabled
+ register: result
+- name: Get the current device alerts
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/device-alerts"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ register: current_state
+- name: Determine whether the current state is expected
+ assert:
+ that: "{{ result['changed'] and not current_state['json']['alertingEnabled'] }}"
+ msg: "Failed to disable alerts!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts_syslog/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts_syslog/tasks/main.yml
new file mode 100644
index 000000000..34de206e8
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_alerts_syslog/tasks/main.yml
@@ -0,0 +1,112 @@
+# Test code for the na_santricity_alerts_syslog module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+- name: Set facts for na_santricity_alerts_syslog module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Delete all alert syslog servers
+ na_santricity_alerts_syslog:
+ <<: *creds
+
+- name: Add alert syslog servers (change, check_mode)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ servers:
+ - address: "192.168.1.100"
+ - address: "192.168.2.100"
+ port: 514
+ - address: "192.168.3.100"
+ port: 1000
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Add alert syslog servers (change)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ servers:
+ - address: "192.168.1.100"
+ - address: "192.168.2.100"
+ port: 514
+ - address: "192.168.3.100"
+ port: 1000
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Add alert syslog servers (no change)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ test: true
+ servers:
+ - address: "192.168.1.100"
+ - address: "192.168.2.100"
+ port: 514
+ - address: "192.168.3.100"
+ port: 1000
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Remove one alert syslog server (change)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ test: true
+ servers:
+ - address: "192.168.2.100"
+ port: 514
+ - address: "192.168.3.100"
+ port: 1000
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add one alert syslog server (change)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ test: true
+ servers:
+ - address: "192.168.1.100"
+ - address: "192.168.2.100"
+ port: 514
+ - address: "192.168.3.100"
+ port: 1000
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete all alert syslog servers (change)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete all alert syslog servers (no change)
+ na_santricity_alerts_syslog:
+ <<: *creds
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_asup/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_asup/tasks/main.yml
new file mode 100644
index 000000000..fd66149f6
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_asup/tasks/main.yml
@@ -0,0 +1,287 @@
+# Test code for the na_santricity_asup module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set credential facts
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Enable auto-support using default values
+ na_santricity_asup:
+ <<: *creds
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected default state
+ assert:
+ that: "{{ current.json.asupEnabled and
+ current.json.onDemandEnabled and
+ current.json.remoteDiagsEnabled and
+ current.json.schedule.dailyMinTime == 0 and
+ current.json.schedule.dailyMaxTime == 1439 }}"
+ msg: "Unexpected auto-support state"
+- name: Validate auto-support schedule
+ assert:
+ that: "{{ item in current.json.schedule.daysOfWeek }}"
+ msg: "{{ item }} is missing from the schedule"
+ loop: "{{ lookup('list', ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']) }}"
+
+- name: Disable auto-support
+ na_santricity_asup:
+ <<: *creds
+ state: disabled
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support is disabled
+ assert:
+ that: "{{ not current.json.asupEnabled }}"
+ msg: "Auto-support failed to be disabled"
+
+- name: Enable auto-support using specific values
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ active: true
+ start: 22
+ end: 24
+ days:
+ - friday
+ - saturday
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
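+# Note: start/end hours map to schedule.dailyMinTime/dailyMaxTime in minutes past midnight; an end of 24 corresponds to 1439 (23:59), so the asserts below compare against (hour * 60).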
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current.json.asupEnabled and
+ current.json.onDemandEnabled and
+ current.json.remoteDiagsEnabled and
+ current.json.schedule.dailyMinTime == (22 * 60) and
+ current.json.schedule.dailyMaxTime == (24 * 60 - 1) }}"
+ msg: "Unexpected auto-support state"
+- name: Validate auto-support schedule
+ assert:
+ that: "{{ item in current.json.schedule.daysOfWeek }}"
+ msg: "{{ item }} is missing from the schedule"
+ loop: "{{ lookup('list', ['friday', 'saturday']) }}"
+
+- name: Set auto-support schedule
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ active: true
+ start: 0
+ end: 5
+ days:
+ - monday
+ - thursday
+ - sunday
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current.json.asupEnabled and
+ current.json.onDemandEnabled and
+ current.json.remoteDiagsEnabled and
+ current.json.schedule.dailyMinTime == (0 * 60) and
+ current.json.schedule.dailyMaxTime == (5 * 60) }}"
+ msg: "Unexpected auto-support state"
+- name: Validate auto-support schedule
+ assert:
+ that: "{{ item in current.json.schedule.daysOfWeek }}"
+ msg: "{{ item }} is missing from the schedule"
+ loop: "{{ lookup('list', ['monday', 'thursday', 'sunday']) }}"
+
+- name: Repeat auto-support schedule change to verify idempotency
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ active: true
+ start: 0
+ end: 5
+ days:
+ - monday
+ - thursday
+ - sunday
+ register: result
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current.json.asupEnabled and
+ current.json.onDemandEnabled and
+ current.json.remoteDiagsEnabled and
+ current.json.schedule.dailyMinTime == (0 * 60) and
+ current.json.schedule.dailyMaxTime == (5 * 60) }}"
+ msg: "Unexpected auto-support state"
+- name: Validate auto-support schedule
+ assert:
+ that: "{{ item in current.json.schedule.daysOfWeek }}"
+ msg: "{{ item }} is missing from the schedule"
+ loop: "{{ lookup('list', ['monday', 'thursday', 'sunday']) }}"
+- name: Validate change was not detected
+ assert:
+ that: "{{ not result.changed }}"
+ msg: "Invalid change was detected"
+
+- name: Set auto-support schedule with active disabled
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ active: false
+ start: 0
+ end: 5
+ days:
+ - monday
+ - thursday
+ - sunday
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current.json.asupEnabled and not current.json.onDemandEnabled and not current.json.remoteDiagsEnabled }}"
+ msg: "Unexpected auto-support state"
+
+- name: Set auto-support http direct delivery method
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ method: http
+ routing_type: direct
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current['json']['delivery']['method'] == 'http' }}"
+ msg: "Delievery method should be http!"
+
+- name: Set auto-support https direct delivery method
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ method: https
+ routing_type: direct
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current['json']['delivery']['method'] == 'https' }}"
+ msg: "Delievery method should be https!"
+
+- name: Set auto-support https proxy delivery method
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ method: https
+ routing_type: proxy
+ proxy:
+ host: 192.168.1.1
+ port: 1000
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current['json']['delivery']['method'] == 'https' and
+ current['json']['delivery']['proxyHost'] == '192.168.1.1' and
+ current['json']['delivery']['proxyPort'] == 1000 }}"
+ msg: "Delievery method should be https-proxy-host!"
+
+- name: Set auto-support https proxy-script delivery method
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ method: https
+ routing_type: script
+ proxy:
+ script: autosupport_script.sh
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current['json']['delivery']['method'] == 'https' and
+ current['json']['delivery']['proxyScript'] == 'autosupport_script.sh' }}"
+ msg: "Delievery method should be https-proxy-script!"
+
+- name: Set auto-support email delivery method
+ na_santricity_asup:
+ <<: *creds
+ state: enabled
+ method: email
+ email:
+ server: server@example.com
+ sender: noreply@example.com
+- name: Collect auto-support state information from the array
+ uri:
+ url: "{{ base_url }}device-asup"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+- name: Validate auto-support expected state
+ assert:
+ that: "{{ current['json']['delivery']['method'] == 'smtp' and
+ current['json']['delivery']['mailRelayServer'] == 'server@example.com' and
+ current['json']['delivery']['mailSenderAddress'] == 'noreply@example.com' }}"
+ msg: "Delievery method should be email!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auditlog/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auditlog/tasks/main.yml
new file mode 100644
index 000000000..424ba2e55
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auditlog/tasks/main.yml
@@ -0,0 +1,220 @@
+# Test code for the na_santricity_auditlog module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# Note: If the audit log is full, clear it before testing, since a full log can result in unexpected 422 symbol errors.
+- name: Set credential facts
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ proxy_credentials: &proxy_creds
+ ssid: "PROXY"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ proxy_embedded_credentials: &proxy_embedded_creds
+ ssid: "{{ proxy_ssid }}"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+
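+# Note: ssid "PROXY" addresses the Web Services Proxy itself, while proxy_ssid addresses a storage system managed through the proxy.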
+- name: Set audit log settings to the defaults
+ na_santricity_auditlog:
+ <<: *creds
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/audit-log/config"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ config['json']['auditLogMaxRecords'] == 50000 and
+ config['json']['auditLogLevel'] == 'writeOnly' and
+ config['json']['auditLogFullPolicy'] == 'overWrite' and
+ config['json']['auditLogWarningThresholdPct'] == 90 }}"
+ msg: "Config settings are not correct!"
+
+- name: Change audit log settings. (change, check_mode)
+ na_santricity_auditlog:
+ <<: *creds
+ max_records: 50000
+ log_level: all
+ full_policy: preventSystemAccess
+ threshold: 60
+ register: result
+ check_mode: true
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/audit-log/config"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ result['changed'] and config['json']['auditLogMaxRecords'] == 50000 and
+ config['json']['auditLogLevel'] == 'writeOnly' and
+ config['json']['auditLogFullPolicy'] == 'overWrite' and
+ config['json']['auditLogWarningThresholdPct'] == 90 }}"
+ msg: "Config settings are not correct!"
+
+- name: Change audit log settings. (change)
+ na_santricity_auditlog:
+ <<: *creds
+ max_records: 10000
+ log_level: all
+ full_policy: preventSystemAccess
+ threshold: 60
+ register: result
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/audit-log/config"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ result['changed'] and config['json']['auditLogMaxRecords'] == 10000 and
+ config['json']['auditLogLevel'] == 'all' and
+ config['json']['auditLogFullPolicy'] == 'preventSystemAccess' and
+ config['json']['auditLogWarningThresholdPct'] == 60 }}"
+ msg: "Config settings are not correct!"
+
+- name: Set audit log settings to the defaults (proxy)
+ na_santricity_auditlog:
+ <<: *proxy_creds
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ proxy_base_url }}audit-log/config"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ config['json']['auditLogMaxRecords'] == 50000 and
+ config['json']['auditLogLevel'] == 'writeOnly' and
+ config['json']['auditLogFullPolicy'] == 'overWrite' and
+ config['json']['auditLogWarningThresholdPct'] == 90 }}"
+ msg: "Config settings are not correct!"
+
+- name: Change audit log settings. (proxy) (change, check_mode)
+ na_santricity_auditlog:
+ <<: *proxy_creds
+ max_records: 50000
+ log_level: all
+ full_policy: preventSystemAccess
+ threshold: 60
+ register: result
+ check_mode: true
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ proxy_base_url }}audit-log/config"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ result['changed'] and config['json']['auditLogMaxRecords'] == 50000 and
+ config['json']['auditLogLevel'] == 'writeOnly' and
+ config['json']['auditLogFullPolicy'] == 'overWrite' and
+ config['json']['auditLogWarningThresholdPct'] == 90 }}"
+ msg: "Config settings are not correct!"
+
+- name: Change audit log settings. (proxy) (change)
+ na_santricity_auditlog:
+ <<: *proxy_creds
+ max_records: 10000
+ log_level: all
+ full_policy: preventSystemAccess
+ threshold: 60
+ register: result
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ proxy_base_url }}audit-log/config"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ result['changed'] and config['json']['auditLogMaxRecords'] == 10000 and
+ config['json']['auditLogLevel'] == 'all' and
+ config['json']['auditLogFullPolicy'] == 'preventSystemAccess' and
+ config['json']['auditLogWarningThresholdPct'] == 60 }}"
+ msg: "Config settings are not correct!"
+
+- name: Set audit log settings to the defaults (proxy)
+ na_santricity_auditlog:
+ <<: *proxy_embedded_creds
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/audit-log/config"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ config['json']['auditLogMaxRecords'] == 50000 and
+ config['json']['auditLogLevel'] == 'writeOnly' and
+ config['json']['auditLogFullPolicy'] == 'overWrite' and
+ config['json']['auditLogWarningThresholdPct'] == 90 }}"
+ msg: "Config settings are not correct!"
+
+- name: Change audit log settings. (proxy) (change, check_mode)
+ na_santricity_auditlog:
+ <<: *proxy_embedded_creds
+ max_records: 50000
+ log_level: all
+ full_policy: preventSystemAccess
+ threshold: 60
+ register: result
+ check_mode: true
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/audit-log/config"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ result['changed'] and config['json']['auditLogMaxRecords'] == 50000 and
+ config['json']['auditLogLevel'] == 'writeOnly' and
+ config['json']['auditLogFullPolicy'] == 'overWrite' and
+ config['json']['auditLogWarningThresholdPct'] == 90 }}"
+ msg: "Config settings are not correct!"
+
+- name: Change audit log settings. (proxy) (change)
+ na_santricity_auditlog:
+ <<: *proxy_embedded_creds
+ max_records: 10000
+ log_level: all
+ full_policy: preventSystemAccess
+ threshold: 60
+ register: result
+- name: Retrieve current auditlog config settings
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/audit-log/config"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: config
+- name: Validate change status
+ assert:
+ that: "{{ result['changed'] and config['json']['auditLogMaxRecords'] == 10000 and
+ config['json']['auditLogLevel'] == 'all' and
+ config['json']['auditLogFullPolicy'] == 'preventSystemAccess' and
+ config['json']['auditLogWarningThresholdPct'] == 60 }}"
+ msg: "Config settings are not correct!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auth/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auth/tasks/main.yml
new file mode 100644
index 000000000..12c552520
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_auth/tasks/main.yml
@@ -0,0 +1,170 @@
+# Test code for the na_santricity_auth module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# Clear embedded, legacy, and passwords before executing integration tests!
+
+- name: Set initial credential variables
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ proxy_credentials: &proxy_creds
+ ssid: "{{ proxy_ssid }}"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ proxy_legacy_credentials: &proxy_legacy_creds
+ ssid: "{{ proxy_legacy_ssid }}"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+
+# TODO: series of tests for embedded
+# Validate admin passwords are updated regardless of supplied api_password and current_admin_password options
+- name: Set storage system's initial admin password (embedded, changed)
+ na_santricity_auth:
+ <<: *creds
+ minimum_password_length: 8
+ password: infiniti
+ user: admin
+
+- name: Set storage system's non-admin passwords (embedded, changed)
+ na_santricity_auth:
+ <<: *creds
+ password: "{{ item }}_password"
+ user: "{{ item }}"
+ ignore_errors: true
+ loop: ["monitor", "support", "security", "storage"]
+
+- name: Clear storage system's admin password (embedded, changed)
+ na_santricity_auth:
+ <<: *creds
+ minimum_password_length: 0
+ password: ""
+ user: admin
+
+- name: Set storage system's minimum password length (embedded, changed)
+ na_santricity_auth:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: ""
+ validate_certs: "{{ validate_cert }}"
+ minimum_password_length: 8
+
+- name: Set proxy's initial password (proxy, changed)
+ na_santricity_auth:
+ ssid: proxy
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ password: infiniti
+ user: admin
+
+# # TODO: series of tests for proxy
+- name: Add storage systems to proxy without passwords
+ na_santricity_proxy_systems:
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ subnet_mask: "{{ proxy_discover_subnet }}"
+ systems: "{{ systems }}"
+ password: ""
+
+# Validate proxy system's admin passwords are updated regardless of current_admin_password options
+- name: Set storage system's initial password (proxy system with embedded, changed)
+ na_santricity_auth:
+ <<: *proxy_creds
+ minimum_password_length: 8
+ current_admin_password: "" # THIS NEEDS TO MATCH STORAGE SYSTEM'S STORED-PASSWORD
+ password: infiniti
+ user: admin
+
+- name: Set storage system's initial password (proxy system without embedded, changed)
+ na_santricity_auth:
+ <<: *proxy_legacy_creds
+ minimum_password_length: 8
+ current_admin_password: "" # THIS NEEDS TO MATCH LEGACY STORAGE SYSTEM'S STORED-PASSWORD
+ password: infiniti
+ user: admin
+
+- pause: seconds=10
+
+- name: Set storage system's initial password (proxy system with embedded, changed)
+ na_santricity_auth:
+ ssid: "10"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ current_admin_password: infiniti # THIS NEEDS TO MATCH STORAGE SYSTEM'S STORED-PASSWORD
+ password: "{{ item }}_password"
+ user: "{{ item }}"
+ loop: ["monitor", "support", "security", "storage"]
+
+- name: Set storage system's initial password (proxy system with embedded, changed)
+ na_santricity_auth:
+ ssid: "10"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ current_admin_password: infiniti # THIS NEEDS TO MATCH STORAGE SYSTEM'S STORED-PASSWORD
+ minimum_password_length: 0
+ password: ""
+ user: admin
+
+- name: Set storage system's initial password (proxy system without embedded, changed)
+ na_santricity_auth:
+ ssid: "20"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ current_admin_password: infiniti # THIS NEEDS TO MATCH STORAGE SYSTEM'S STORED-PASSWORD
+ password: ""
+ user: admin
+
+- name: Set storage system's initial password (proxy system without embedded, changed)
+ na_santricity_auth:
+ ssid: proxy
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ minimum_password_length: 0
+ password: ""
+ user: admin
+
+- name: Set storage system's initial password (proxy system with embedded, changed)
+ na_santricity_auth:
+ ssid: Proxy
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "" # THIS NEEDS TO MATCH PROXY'S PASSWORD
+ validate_certs: "{{ proxy_validate_cert }}"
+ minimum_password_length: 8
+
+- name: Set storage system's initial password (proxy system with embedded, changed)
+ na_santricity_auth:
+ ssid: "10"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "" # THIS NEEDS TO MATCH PROXY'S PASSWORD
+ validate_certs: "{{ proxy_validate_cert }}"
+ minimum_password_length: 8
+
+- name: Remove storage system from proxy
+ na_santricity_proxy_systems:
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "" # THIS NEEDS TO MATCH PROXY'S PASSWORD
+ validate_certs: "{{ proxy_validate_cert }}"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_client_certificate/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_client_certificate/tasks/main.yml
new file mode 100644
index 000000000..9f3964d96
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_client_certificate/tasks/main.yml
@@ -0,0 +1,55 @@
+# Test code for the na_santricity_client_certificate module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set credential facts
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ certificates:
+ - "/home/swartzn/ExampleRootCA.crt"
+ - "/home/swartzn/ExampleIssuingCA.crt"
+ - "/home/swartzn/ExampleClient.crt"
+
+- name: Remove certificates
+ na_santricity_client_certificate:
+ <<: *creds
+
+- name: Upload certificate (changed, check_mode)
+ na_santricity_client_certificate:
+ <<: *creds
+ certificates: "{{ certificates }}"
+ register: result
+ check_mode: true
+- assert:
+ that: "{{ result['changed'] }}"
+ msg: "Failed to upload certificates to storage array."
+
+- name: Upload certificate (changed)
+ na_santricity_client_certificate:
+ <<: *creds
+ certificates: "{{ certificates }}"
+ register: result
+- assert:
+ that: "{{ result['changed'] }}"
+ msg: "Failed to upload certificates to storage array."
+
+- name: Repeat upload certificate (no change)
+ na_santricity_client_certificate:
+ <<: *creds
+ certificates: "{{ certificates }}"
+ register: result
+- assert:
+ that: "{{ not result['changed'] }}"
+ msg: "Failed not to make any changes."
+
+- name: Remove certificates
+ na_santricity_client_certificate:
+ <<: *creds
+ register: result
+- assert:
+ that: "{{ result['changed'] }}"
+ msg: "Failed to remove uploaded certificates" \ No newline at end of file
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_discover/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_discover/tasks/main.yml
new file mode 100644
index 000000000..38c18f977
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_discover/tasks/main.yml
@@ -0,0 +1,64 @@
+# Test code for the na_santricity_discover module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+- name: Discover storage systems using SANtricity Web Services Proxy
+ na_santricity_discover:
+ proxy_url: "{{ proxy_base_url }}"
+ proxy_username: "{{ proxy_username }}"
+ proxy_password: "{{ proxy_password }}"
+ proxy_validate_certs: "{{ proxy_validate_cert }}"
+ subnet_mask: "{{ proxy_discover_subnet }}"
+ prefer_embedded: false
+ register: systems
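+# Scan the discovered systems for the expected serial number and capture its first reported API URL; api_url stays empty when no match is found.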
+- name: find storage system
+ set_fact:
+ api_url: |-
+ {%- for system_serial in (systems["systems_found"].keys() | list) -%}
+ {%- if system_serial == expected_serial_with_proxy_legacy %}
+ {{- systems["systems_found"][system_serial]["api_urls"][0] -}}
+ {%- endif -%}
+ {%- endfor -%}
+- name: Verify storage system is found
+ fail:
+ msg: "Storage system was not discovered"
+ when: api_url == "" or api_url != proxy_base_url
+
+- name: Discover storage systems using SANtricity Web Services Proxy with a preference for embedded url
+ na_santricity_discover:
+ proxy_url: "{{ proxy_base_url }}"
+ proxy_username: "{{ proxy_username }}"
+ proxy_password: "{{ proxy_password }}"
+ proxy_validate_certs: "{{ proxy_validate_cert }}"
+ subnet_mask: "{{ proxy_discover_subnet }}"
+ prefer_embedded: true
+ register: systems
+- name: find storage system
+ set_fact:
+ api_url: |-
+ {%- for system_serial in (systems["systems_found"].keys() | list) -%}
+ {%- if system_serial == expected_serial_with_proxy_embedded %}
+ {{- systems["systems_found"][system_serial]["api_urls"][0] -}}
+ {%- endif -%}
+ {%- endfor -%}
+- name: Verify storage system is found
+ fail:
+ msg: "Storage system was not discovered"
+ when: api_url == "" or api_url == proxy_base_url
+
+- name: Discover storage systems not using SANtricity Web Services Proxy (requires SANtricity version 11.60.2 or later)
+ na_santricity_discover:
+ subnet_mask: "{{ proxy_discover_subnet }}"
+ register: systems
+- name: find storage system
+ set_fact:
+ api_url: |-
+ {%- for system_serial in (systems["systems_found"].keys() | list) -%}
+ {%- if system_serial == expected_serial_without_proxy %}
+ {{- systems["systems_found"][system_serial]["api_urls"][0] -}}
+ {%- endif -%}
+ {%- endfor -%}
+- name: Verify storage system is found
+ fail:
+ msg: "Storage system was not discovered"
+ when: api_url == ""
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_drive_firmware/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_drive_firmware/tasks/main.yml
new file mode 100644
index 000000000..5559691dc
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_drive_firmware/tasks/main.yml
@@ -0,0 +1,185 @@
+# Test code for the na_santricity_drive_firmware module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# Existing symbol issue: occasionally symbol returns a 422, which causes Ansible to fail; however, the drive firmware download will still complete.
+# Work-around: Remove all storage provisioning before commencing test.
+
+- name: Set necessary credentials and other facts.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ firmware:
+ downgrade:
+ list:
+ - "/home/swartzn/Downloads/drive firmware/D_PX04SVQ160_DOWNGRADE_MS00toMSB6_801.dlp"
+ - "/home/swartzn/Downloads/drive firmware/D_ST1200MM0017_DNGRADE_MS02toMS00_6600_802.dlp"
+ check:
+ - firmware: "D_PX04SVQ160_DOWNGRADE_MS00toMSB6_801.dlp"
+ drive: "PX04SVQ160"
+ version: "MSB6"
+ - firmware: "D_ST1200MM0017_DNGRADE_MS02toMS00_6600_802.dlp"
+ drive: "ST1200MM0017"
+ version: "MS00"
+ upgrade:
+ list:
+ - "/home/swartzn/Downloads/drive firmware/D_PX04SVQ160_30603183_MS00_6600_001.dlp"
+ - "/home/swartzn/Downloads/drive firmware/D_ST1200MM0017_30602214_MS02_5600_002.dlp"
+ check:
+ - firmware: "D_PX04SVQ160_30603183_MS00_6600_001.dlp"
+ drive: "PX04SVQ160"
+ version: "MS00"
+ - firmware: "D_ST1200MM0017_30602214_MS02_5600_002.dlp"
+ drive: "ST1200MM0017"
+ version: "MS02"
+
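+# Each "check" entry above pairs a firmware file with the drive product ID and the firmware version the drives should report once that file has been applied; the assertions below rely on this mapping.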
+- name: Set drive firmware (baseline, maybe change)
+  na_santricity_drive_firmware:
+ <<: *creds
+ firmware: "{{ firmware['downgrade']['list'] }}"
+ wait_for_completion: true
+ ignore_inaccessible_drives: true
+ upgrade_drives_online: false
+ register: drive_firmware
+- pause: seconds=5
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/drives"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_drive_firmware
+- name: Check if drive firmware is the expected versions
+ assert:
+ that: "{{ (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
+ (firmware['downgrade']['check'][0]['drive'] == item['productID'].strip() and
+ firmware['downgrade']['check'][0]['version'] == item['softwareVersion']) or
+ (firmware['downgrade']['check'][1]['drive'] == item['productID'].strip() and
+ firmware['downgrade']['check'][1]['version'] == item['softwareVersion']) }}"
+ msg: "Drive firmware failed to update all drives"
+ loop: "{{ lookup('list', current_drive_firmware['json']) }}"
+
+- name: Set drive firmware (upgrade, change-checkmode)
+  na_santricity_drive_firmware:
+ <<: *creds
+ firmware: "{{ firmware['upgrade']['list'] }}"
+ wait_for_completion: true
+ ignore_inaccessible_drives: true
+ upgrade_drives_online: false
+ register: drive_firmware
+ check_mode: true
+- pause: seconds=5
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/drives"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_drive_firmware
+- name: Validate change status
+ assert:
+ that: "{{ drive_firmware.changed }}"
+ msg: "Change status is incorrect."
+- name: Check if drive firmware is the expected versions
+ assert:
+ that: "{{ (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
+ (firmware['downgrade']['check'][0]['drive'] == item['productID'].strip() and
+ firmware['downgrade']['check'][0]['version'] == item['softwareVersion']) or
+ (firmware['downgrade']['check'][1]['drive'] == item['productID'].strip() and
+ firmware['downgrade']['check'][1]['version'] == item['softwareVersion']) }}"
+ msg: "Drive firmware failed to update all drives"
+ loop: "{{ lookup('list', current_drive_firmware['json']) }}"
+
+- name: Set drive firmware (upgrade, change)
+  na_santricity_drive_firmware:
+ <<: *creds
+ firmware: "{{ firmware['upgrade']['list'] }}"
+ wait_for_completion: true
+ ignore_inaccessible_drives: true
+ upgrade_drives_online: false
+ register: drive_firmware
+- pause: seconds=5
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/drives"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_drive_firmware
+- name: Validate change status
+ assert:
+ that: "{{ drive_firmware.changed }}"
+ msg: "Change status is incorrect."
+- name: Check if drive firmware is the expected versions
+ assert:
+ that: "{{ (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
+ (firmware['upgrade']['check'][0]['drive'] == item['productID'].strip() and
+ firmware['upgrade']['check'][0]['version'] == item['softwareVersion']) or
+ (firmware['upgrade']['check'][1]['drive'] == item['productID'].strip() and
+ firmware['upgrade']['check'][1]['version'] == item['softwareVersion']) }}"
+ msg: "Drive firmware failed to update all drives"
+ loop: "{{ lookup('list', current_drive_firmware['json']) }}"
+
+- name: Set drive firmware (upgrade, no change)
+  na_santricity_drive_firmware:
+ <<: *creds
+ firmware: "{{ firmware['upgrade']['list'] }}"
+ wait_for_completion: true
+ ignore_inaccessible_drives: true
+ upgrade_drives_online: false
+ register: drive_firmware
+- pause: seconds=5
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/drives"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_drive_firmware
+- name: Validate change status
+ assert:
+ that: "{{ not drive_firmware.changed }}"
+ msg: "Change status is incorrect."
+- name: Check if drive firmware is the expected versions
+ assert:
+ that: "{{ (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
+ (firmware['upgrade']['check'][0]['drive'] == item['productID'].strip() and
+ firmware['upgrade']['check'][0]['version'] == item['softwareVersion']) or
+ (firmware['upgrade']['check'][1]['drive'] == item['productID'].strip() and
+ firmware['upgrade']['check'][1]['version'] == item['softwareVersion']) }}"
+ msg: "Drive firmware failed to update all drives"
+ loop: "{{ lookup('list', current_drive_firmware['json']) }}"
+
+- name: Set drive firmware (downgrade, change)
+  na_santricity_drive_firmware:
+ <<: *creds
+ firmware: "{{ firmware['downgrade']['list'] }}"
+ wait_for_completion: true
+ ignore_inaccessible_drives: true
+ upgrade_drives_online: false
+ register: drive_firmware
+- pause: seconds=5
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/drives"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_drive_firmware
+- name: Validate change status
+ assert:
+ that: "{{ drive_firmware.changed }}"
+ msg: "Change status is incorrect."
+- name: Check if drive firmware is the expected versions
+ assert:
+ that: "{{ (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
+ (firmware['downgrade']['check'][0]['drive'] == item['productID'].strip() and
+ firmware['downgrade']['check'][0]['version'] == item['softwareVersion']) or
+ (firmware['downgrade']['check'][1]['drive'] == item['productID'].strip() and
+ firmware['downgrade']['check'][1]['version'] == item['softwareVersion']) }}"
+ msg: "Drive firmware failed to update all drives"
+ loop: "{{ lookup('list', current_drive_firmware['json']) }}"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_facts/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_facts/tasks/main.yml
new file mode 100644
index 000000000..14cc43c62
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_facts/tasks/main.yml
@@ -0,0 +1,19 @@
+# Test code for the na_santricity_facts module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+- name: Retrieve facts from SANtricity Web Services Embedded
+ na_santricity_facts:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Retrieve facts from SANtricity Web Services Proxy
+ na_santricity_facts:
+ ssid: "{{ proxy_ssid }}"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}" \ No newline at end of file
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_legacy_tests.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_legacy_tests.yml
new file mode 100644
index 000000000..6aff714cc
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_legacy_tests.yml
@@ -0,0 +1,128 @@
+# Test code for the na_santricity_firmware module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# TODO: MUST BE DOWNGRADED TO RCB_11.40.3R2_280x_5c7d81b3.dlp and N280X-842834-D02.dlp BEFORE EXECUTING INTEGRATION TESTS
+# loadControllerFirmware_MT swartzn@10.113.1.250 /home/swartzn/Downloads/RCB_11.40.3R2_280x_5c7d81b3.dlp /home/swartzn/Downloads/N280X-842834-D02.dlp
+
+# This integration test will validate upgrade functionality for firmware-only, firmware-and-nvsram, and check mode.
+- name: Set credentials and other facts
+ set_fact:
+ proxy_credentials: &proxy_creds
+ ssid: "{{ proxy_legacy_ssid }}"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ path: "/home/swartzn/Downloads/"
+ upgrades:
+ - firmware: "RC_08405000_m3_e10_840_5600.dlp"
+ nvsram: "N5600-840834-D03.dlp"
+ expected_firmware_version: "08.40.50.00"
+ expected_nvsram_version: "N5600-840834-D03"
+ - firmware: "RC_08403000_m3_e10_840_5600.dlp"
+ nvsram: "N5600-840834-D03.dlp"
+ expected_firmware_version: "08.40.30.00"
+ expected_nvsram_version: "N5600-840834-D03"
+
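+# The first task below applies upgrades[1] to establish a known firmware/NVSRAM baseline; upgrades[0] is then used to exercise check mode and the actual upgrade path.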
+- name: Perform firmware upgrade using the Web Services Proxy (changed, firmware)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[1]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[1]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_legacy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_legacy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[1]['expected_firmware_version'] }}"
+ msg: "Failed to change the firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[1]['expected_nvsram_version'] }}"
+ msg: "Failed to change the nvsram version."
+
+- name: Perform firmware upgrade using the Web Services Proxy (check_mode, changed, firmware)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+ check_mode: true
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_legacy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_legacy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[1]['expected_firmware_version'] }}"
+ msg: "Failed to change the firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[1]['expected_nvsram_version'] }}"
+ msg: "Failed to change the nvsram version."
+
+- name: Perform firmware upgrade using the Web Services Proxy (changed, firmware)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_legacy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_legacy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Failed to change the firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "Failed to change the nvsram version."
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_tests.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_tests.yml
new file mode 100644
index 000000000..99827e1bb
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/firmware_tests.yml
@@ -0,0 +1,320 @@
+# Test code for the na_santricity_firmware module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# TODO: CONTROLLER FIRMWARE MUST BE DOWNGRADED TO RCB_11.40.3R2_280x_5c7d81b3.dlp and N280X-842834-D02.dlp BEFORE EXECUTING THESE INTEGRATION TESTS
+# loadControllerFirmware_MT swartzn@10.113.1.250 /home/swartzn/Downloads/RCB_11.40.3R2_280x_5c7d81b3.dlp /home/swartzn/Downloads/N280X-842834-D02.dlp
+
+# This integration test will validate upgrade functionality for firmware-only, firmware-and-nvsram, and check mode.
+- name: Set credentials and other facts
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ proxy_credentials: &proxy_creds
+ ssid: "{{ proxy_ssid }}"
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ path: "/home/swartzn/Downloads/"
+ upgrades:
+ - firmware: "RCB_11.40.3R2_280x_5c7d81b3.dlp"
+ nvsram: "N280X-842834-D02.dlp"
+ expected_firmware_version: "08.42.30.05"
+ expected_nvsram_version: "N280X-842834-D02"
+ - firmware: "RCB_11.40.5_280x_5ceef00e.dlp"
+ nvsram: "N280X-842834-D02.dlp"
+ expected_firmware_version: "08.42.50.00"
+ expected_nvsram_version: "N280X-842834-D02"
+ - firmware: "RCB_11.50.2_280x_5ce8501f.dlp"
+ nvsram: "N280X-852834-D02.dlp"
+ expected_firmware_version: "08.52.00.00"
+ expected_nvsram_version: "N280X-852834-D02"
+
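+# Per the TODO above, the controllers are assumed to start on upgrades[0] (RCB_11.40.3R2 / N280X-842834-D02);
+# upgrades[1] and upgrades[2] are progressively newer bundles used to exercise actual upgrades.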
+- name: Perform firmware upgrade using the Web Services REST API (checkmode-no change, firmware only)
+ na_santricity_firmware:
+ <<: *creds
+ nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ check_mode: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == False }}"
+ msg: "Failed to return unchanged."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Unexpected firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "Unexpected nvsram version."
+
+- name: Perform firmware upgrade using the Web Services REST API (no change, firmware only)
+ na_santricity_firmware:
+ <<: *creds
+ nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == False }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Unexpected firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "Unexpected nvsram version."
+
+- name: Perform firmware upgrade using the Web Services REST API (checkmode-change, firmware)
+ na_santricity_firmware:
+ <<: *creds
+ nvsram: "{{ path }}{{ upgrades[1]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[1]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+ check_mode: true
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Unexpected firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "Unexpected nvsram version."
+
+- name: Perform firmware upgrade using the Web Services REST API (change, firmware)
+ na_santricity_firmware:
+ <<: *creds
+ nvsram: "{{ path }}{{ upgrades[1]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[1]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[1]['expected_firmware_version'] }}"
+ msg: "Unexpected firmware version. {{ current_firmware['json'][0] }} != {{ upgrades[1]['expected_firmware_version'] }}"
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[1]['expected_nvsram_version'] }}"
+ msg: "Unexpected nvsram version. {{ current_nvsram['json'][0] }} != {{ upgrades[1]['expected_nvsram_version'] }}"
+
+- name: Perform firmware upgrade using the Web Services Proxy (changed, firmware)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Failed to change the firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "Failed to change the nvsram version."
+
+- name: Perform firmware upgrade using the Web Services Proxy (checkmode-unchanged, firmware)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ check_mode: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == False }}"
+ msg: "Failed to return unchanged."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Failed to change the firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "Failed to change the nvsram version."
+
+- name: Perform firmware upgrade using the Web Services Proxy (checkmode-change, firmware and nvsram)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[2]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[2]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ check_mode: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[0]['expected_firmware_version'] }}"
+ msg: "Failed to change the firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version'] }}"
+ msg: "Failed to change the nvsram version."
+
+- name: Perform firmware upgrade using the Web Services Proxy (changed, firmware and nvsram)
+ na_santricity_firmware:
+ <<: *proxy_creds
+ nvsram: "{{ path }}{{ upgrades[2]['nvsram'] }}"
+ firmware: "{{ path }}{{ upgrades[2]['firmware'] }}"
+ wait_for_completion: true
+ clear_mel_events: true
+ register: results
+- name: Retrieve current firmware version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_firmware
+- name: Retrieve current nvsram version
+ uri:
+ url: "{{ proxy_base_url }}storage-systems/{{ proxy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
+ user: "{{ proxy_username }}"
+ password: "{{ proxy_password }}"
+ validate_certs: no
+ register: current_nvsram
+- name: Verify change status
+ assert:
+ that: "{{ results.changed == True }}"
+ msg: "Failed to return changed."
+- name: Verify current firmware version
+ assert:
+ that: "{{ current_firmware['json'][0] == upgrades[2]['expected_firmware_version'] }}"
+ msg: "Failed to change the firmware version."
+- name: Verify current nvsram version
+ assert:
+ that: "{{ current_nvsram['json'][0] == upgrades[2]['expected_nvsram_version'] }}"
+ msg: "Failed to change the nvsram version."
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/main.yml
new file mode 100644
index 000000000..15edc5200
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_firmware/tasks/main.yml
@@ -0,0 +1,2 @@
+- include_tasks: firmware_tests.yml
+- include_tasks: firmware_legacy_tests.yml
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_global/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_global/tasks/main.yml
new file mode 100644
index 000000000..9d6e6df92
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_global/tasks/main.yml
@@ -0,0 +1,185 @@
+# Test code for the na_santricity_global module.
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- include_vars: "../../integration_config.yml"
+
+- name: Set initial global settings
+ na_santricity_global:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ name: arrayname01
+ cache_block_size: 32768
+ cache_flush_threshold: 80
+ automatic_load_balancing: disabled
+ host_connectivity_reporting: disabled
+ default_host_type: linux dm-mp
+- name: Retrieve the current array graph
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa"
+ register: graph
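+# Note: defaultHostTypeIndex 28 is expected to correspond to 'linux dm-mp' and index 1 to 'windows'
+# on the test array; adjust these expected indices if the target firmware maps host types differently.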
+- name: Validate initial global settings
+ assert:
+ that: "{{ graph['json'][0]['saData']['storageArrayLabel'] == 'arrayname01' and
+ graph['json'][0]['cache']['cacheBlkSize'] == 32768 and
+ graph['json'][0]['cache']['demandFlushThreshold'] == 80 and
+ not graph['json'][0]['autoLoadBalancingEnabled'] and
+ not graph['json'][0]['hostConnectivityReportingEnabled'] and
+ graph['json'][0]['defaultHostTypeIndex'] == 28 }}"
+ msg: "Failed to set initial global settings"
+
+- name: Repeat initial global settings
+ na_santricity_global:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ name: arrayname01
+ cache_block_size: 32768
+ cache_flush_threshold: 80
+ automatic_load_balancing: disabled
+ host_connectivity_reporting: disabled
+ default_host_type: linux dm-mp
+ register: result
+- name: Retrieve the current array graph
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa"
+ register: graph
+- name: Validate initial global settings
+ assert:
+ that: "{{ not result.changed and
+ graph['json'][0]['saData']['storageArrayLabel'] == 'arrayname01' and
+ graph['json'][0]['cache']['cacheBlkSize'] == 32768 and
+ graph['json'][0]['cache']['demandFlushThreshold'] == 80 and
+ not graph['json'][0]['autoLoadBalancingEnabled'] and
+ not graph['json'][0]['hostConnectivityReportingEnabled'] and
+ graph['json'][0]['defaultHostTypeIndex'] == 28 }}"
+ msg: "Failed to set initial global settings"
+
+- name: Change global settings (check-mode)
+ na_santricity_global:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ name: arrayname02
+ cache_block_size: 8192
+ cache_flush_threshold: 60
+ automatic_load_balancing: disabled
+ host_connectivity_reporting: disabled
+ default_host_type: windows
+ check_mode: true
+ register: result
+- name: Retrieve the current array graph
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa"
+ register: graph
+- name: Validate initial global settings
+ assert:
+ that: "{{ result.changed and
+ graph['json'][0]['saData']['storageArrayLabel'] == 'arrayname01' and
+ graph['json'][0]['cache']['cacheBlkSize'] == 32768 and
+ graph['json'][0]['cache']['demandFlushThreshold'] == 80 and
+ not graph['json'][0]['autoLoadBalancingEnabled'] and
+ not graph['json'][0]['hostConnectivityReportingEnabled'] and
+ graph['json'][0]['defaultHostTypeIndex'] == 28 }}"
+ msg: "Failed to set initial global settings"
+
+- name: Change global settings
+ na_santricity_global:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ name: arrayname02
+ cache_block_size: 8192
+ cache_flush_threshold: 60
+ automatic_load_balancing: disabled
+ host_connectivity_reporting: disabled
+ default_host_type: windows
+ register: result
+- name: Retrieve the current array graph
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa"
+ register: graph
+- name: Validate initial global settings
+ assert:
+ that: "{{ result.changed and
+ graph['json'][0]['saData']['storageArrayLabel'] == 'arrayname02' and
+ graph['json'][0]['cache']['cacheBlkSize'] == 8192 and
+ graph['json'][0]['cache']['demandFlushThreshold'] == 60 and
+ not graph['json'][0]['autoLoadBalancingEnabled'] and
+ not graph['json'][0]['hostConnectivityReportingEnabled'] and
+ graph['json'][0]['defaultHostTypeIndex'] == 1 }}"
+ msg: "Failed to set initial global settings"
+
+- name: Turn on autoload balancing which should force enable host connection reporting
+ na_santricity_global:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ automatic_load_balancing: enabled
+ register: result
+- name: Retrieve the current array graph
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa"
+ register: graph
+- name: Validate initial global settings
+ assert:
+ that: "{{ result.changed and
+ graph['json'][0]['saData']['storageArrayLabel'] == 'arrayname02' and
+ graph['json'][0]['cache']['cacheBlkSize'] == 8192 and
+ graph['json'][0]['cache']['demandFlushThreshold'] == 60 and
+ graph['json'][0]['autoLoadBalancingEnabled'] and
+ graph['json'][0]['hostConnectivityReportingEnabled'] and
+ graph['json'][0]['defaultHostTypeIndex'] == 1 }}"
+ msg: "Failed to set initial global settings"
+
+- name: Change array name only
+ na_santricity_global:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ name: arrayname03
+ register: result
+- name: Retrieve the current array graph
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/graph/xpath-filter?query=/sa"
+ register: graph
+- name: Validate initial global settings
+ assert:
+ that: "{{ result.changed and
+ graph['json'][0]['saData']['storageArrayLabel'] == 'arrayname03' and
+ graph['json'][0]['cache']['cacheBlkSize'] == 8192 and
+ graph['json'][0]['cache']['demandFlushThreshold'] == 60 and
+ graph['json'][0]['autoLoadBalancingEnabled'] and
+ graph['json'][0]['hostConnectivityReportingEnabled'] and
+ graph['json'][0]['defaultHostTypeIndex'] == 1 }}"
+ msg: "Failed to set initial global settings"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_host/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_host/tasks/main.yml
new file mode 100644
index 000000000..cb460a9ea
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_host/tasks/main.yml
@@ -0,0 +1,243 @@
+# Test code for the na_santricity_host module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_host module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Create iSCSI host
+ na_santricity_host:
+ <<: *creds
+ name: windows_iscsi_host
+ host_type: Windows
+ ports:
+ - type: iscsi
+ label: iscsi_p1
+ port: iqn.windows.host.com.1
+ - type: iscsi
+ label: iscsi_p2
+ port: iqn.windows.host.com.2
+
+- name: Create FC host
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc_host
+ host_type: Linux dm-mp
+ ports:
+ - type: fc
+ label: fc_p1
+ port: "0x1122334455667788"
+ - type: fc
+ label: fc_p2
+ port: "01:23:45:67:89:1a:bc:de"
+
+- name: Attempt to change FC host port using different port case (no change)
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc_host
+ host_type: Linux dm-mp
+ ports:
+ - type: FC
+ label: fc_p1
+ port: "0x1122334455667788"
+ - type: FC
+ label: fc_p2
+ port: "01:23:45:67:89:1A:BC:DE"
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Create iSCSI host (no change)
+ na_santricity_host:
+ <<: *creds
+ name: windows_iscsi_host
+ host_type: Windows
+ ports:
+ - type: iscsi
+ label: iscsi_p1
+ port: iqn.windows.host.com.1
+ - type: iscsi
+ label: iscsi_p2
+ port: iqn.windows.host.com.2
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Create FC host (no change)
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc_host
+ host_type: Linux dm-mp
+ ports:
+ - type: fc
+ label: fc_p1
+ port: "0x1122334455667788"
+ - type: fc
+ label: fc_p2
+ port: "01:23:45:67:89:1a:bc:de"
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Create FC host with a used port (change, check_mode)
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc2_host
+ host_type: Linux dm-mp
+ force_port: true
+ ports:
+ - type: fc
+ label: fc2_p1
+ port: "0x1122334455667788"
+ check_mode: true
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Change FC host name to uppercase (change)
+ na_santricity_host:
+ <<: *creds
+ name: Linux_FC_Host
+ host_type: Linux dm-mp
+ ports:
+ - type: fc
+ label: fc_p1
+ port: "0x1122334455667788"
+ - type: fc
+ label: fc_p2
+ port: "01:23:45:67:89:1a:bc:de"
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Change FC host port labels to uppercase (change)
+ na_santricity_host:
+ <<: *creds
+ name: Linux_FC_Host
+ host_type: Linux dm-mp
+ ports:
+ - type: fc
+ label: FC_P1
+ port: "0x1122334455667788"
+ - type: fc
+ label: FC_P2
+ port: "01:23:45:67:89:1a:bc:de"
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Create FC host with a used port (change)
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc2_host
+ host_type: Linux dm-mp
+ force_port: true
+ ports:
+ - type: fc
+ label: fc2_p1
+ port: "0x1122334455667788"
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Create FC host with a used port (no change)
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc2_host
+ host_type: Linux dm-mp
+ force_port: true
+ ports:
+ - type: fc
+ label: fc2_p1
+ port: "0x1122334455667788"
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete iSCSI host (changed)
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: windows_iscsi_host
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete FC host (changed)
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: Linux_FC_Host
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete second FC host (changed)
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: linux_fc2_host
+ register: results
+- name: Verify changes were made
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Delete iSCSI host (no change)
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: windows_iscsi_host
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Delete FC host (no change)
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: Linux_FC_Host
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Delete second FC host (no change)
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: linux_fc2_host
+ register: results
+- name: Verify no changes were made
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_hostgroup/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_hostgroup/tasks/main.yml
new file mode 100644
index 000000000..8a2af77dc
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_hostgroup/tasks/main.yml
@@ -0,0 +1,137 @@
+# Test code for the na_santricity_hostgroup module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_hostgroup module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Setup hosts for the groups
+ block:
+ - name: Create iSCSI host
+ na_santricity_host:
+ <<: *creds
+ name: windows_iscsi_host
+ host_type: Windows
+ ports:
+ - type: iscsi
+ label: iscsi_p1
+ port: iqn.windows.host.com.1
+ - type: iscsi
+ label: iscsi_p2
+ port: iqn.windows.host.com.2
+ - name: Create FC host
+ na_santricity_host:
+ <<: *creds
+ name: linux_fc_host
+ host_type: Linux dm-mp
+ ports:
+ - type: fc
+ label: fc_p1
+ port: "0x1122334455667788"
+ - type: fc
+ label: fc_p2
+ port: "01:23:45:67:89:1a:bc:de"
+
+- name: Create host group and add one host (change)
+ na_santricity_hostgroup:
+ <<: *creds
+ name: hostgroup_test
+ hosts:
+ - windows_iscsi_host
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Create host group and add one host (no change)
+ na_santricity_hostgroup:
+ <<: *creds
+ name: hostgroup_test
+ hosts:
+ - windows_iscsi_host
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add one host (change, check_mode)
+ na_santricity_hostgroup:
+ <<: *creds
+ name: hostgroup_test
+ hosts:
+ - windows_iscsi_host
+ - linux_fc_host
+ register: results
+ check_mode: true
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Add one host (change, check_mode)
+ na_santricity_hostgroup:
+ <<: *creds
+ name: hostgroup_test
+ hosts:
+ - windows_iscsi_host
+ - linux_fc_host
+ register: results
+ check_mode: true
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add one host (change)
+ na_santricity_hostgroup:
+ <<: *creds
+ name: hostgroup_test
+ hosts:
+ - windows_iscsi_host
+ - linux_fc_host
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Remove one host (change)
+ na_santricity_hostgroup:
+ <<: *creds
+ name: hostgroup_test
+ hosts:
+ - linux_fc_host
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete host group (change)
+ na_santricity_hostgroup:
+ <<: *creds
+ state: absent
+ name: hostgroup_test
+
+- name: Delete hosts for the groups
+ block:
+ - name: Delete iSCSI host
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: windows_iscsi_host
+ register: results
+
+ - name: Delete FC host
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: linux_fc_host
+ register: results
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ib_iser_interface/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ib_iser_interface/tasks/main.yml
new file mode 100644
index 000000000..d2d8142b4
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ib_iser_interface/tasks/main.yml
@@ -0,0 +1,88 @@
+# Test code for the na_santricity_ib_iser_interface module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_ib_iser_interface module test
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ interface_a1_ip: &a1_ip 192.168.1.101
+ interface_a2_ip: &a2_ip 192.168.2.101
+
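+# The interface addresses are anchored (&a1_ip / &a2_ip) so the final task can revert the
+# interfaces to the same initial values.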
+- name: Set the initial ib_iser interfaces
+ na_santricity_ib_iser_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ loop:
+ - ["A", "1", *a1_ip]
+ - ["B", "1", *a2_ip]
+
+- name: Repeat the initial ib_iser interfaces (no change)
+ na_santricity_ib_iser_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", *a1_ip]
+ - ["B", "1", *a2_ip]
+- name: Verify no changes were made
+ assert:
+ that: "{{ not item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}"
+
+- name: Change the initial ib_iser interfaces (changed, check_mode)
+ na_santricity_ib_iser_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", "192.168.3.230"]
+ - ["B", "1", "192.168.3.231"]
+ check_mode: true
+- name: Verify changes were reported
+ assert:
+ that: "{{ item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}"
+
+- name: Change the initial ib_iser interfaces (changed)
+ na_santricity_ib_iser_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", "192.168.3.230"]
+ - ["B", "1", "192.168.3.231"]
+- name: Verify changes were made
+ assert:
+ that: "{{ item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}"
+
+- name: Revert to the initial ib_iser interfaces (changed)
+ na_santricity_ib_iser_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", *a1_ip]
+ - ["B", "1", *a2_ip]
+- name: Verify changes were made
+ assert:
+ that: "{{ item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}" \ No newline at end of file
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_interface/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_interface/tasks/main.yml
new file mode 100644
index 000000000..38b6faba1
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_interface/tasks/main.yml
@@ -0,0 +1,115 @@
+# Test code for the na_santricity_iscsi_interface module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_iscsi_interface module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Set controller iSCSI interfaces to DHCP
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: "{{ item }}"
+ port: 1
+ config_method: dhcp
+ mtu: 1500
+ loop: ["A", "B"]
+
+- name: Set controller A iSCSI interface to static (change, check_mode)
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: A
+ port: 1
+ config_method: static
+ address: 192.168.1.100
+ subnet_mask: 255.255.255.0
+ gateway: 192.168.1.1
+ mtu: 1500
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Set controller A iSCSI interface to static (change)
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: A
+ port: 1
+ config_method: static
+ address: 192.168.1.100
+ subnet_mask: 255.255.255.0
+ gateway: 192.168.1.1
+ mtu: 1500
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Set controller A iSCSI interface to static (no change)
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: A
+ port: 1
+ config_method: static
+ address: 192.168.1.100
+ subnet_mask: 255.255.255.0
+ gateway: 192.168.1.1
+ mtu: 1500
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Set controller B iSCSI interface to static (change)
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: B
+ port: 1
+ config_method: static
+ address: 192.168.1.200
+ subnet_mask: 255.255.255.0
+ gateway: 192.168.1.1
+ mtu: 1500
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Set controller A iSCSI interface MTU to 9000 (change)
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: A
+ port: 1
+ config_method: static
+ address: 192.168.1.100
+ subnet_mask: 255.255.255.0
+ gateway: 192.168.1.1
+ mtu: 9000
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Set controller iSCSI interfaces to DHCP
+ na_santricity_iscsi_interface:
+ <<: *creds
+ controller: "{{ item }}"
+ port: 1
+ config_method: dhcp
+ mtu: 1500
+ loop: ["A", "B"]
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_target/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_target/tasks/main.yml
new file mode 100644
index 000000000..b259ec878
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_iscsi_target/tasks/main.yml
@@ -0,0 +1,81 @@
+# Test code for the na_santricity_iscsi_target module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_iscsi_target module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Set initial iSCSI target state
+ na_santricity_iscsi_target:
+ <<: *creds
+ name: eseries_storage_iscsi_target
+ ping: false
+ unnamed_discovery: false
+ chap_secret: "chappySecret"
+
+- name: Clear chap secret
+ na_santricity_iscsi_target:
+ <<: *creds
+ name: eseries_storage_iscsi_target
+ ping: false
+ unnamed_discovery: false
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Make iSCSI target pingable (change, check_mode)
+ na_santricity_iscsi_target:
+ <<: *creds
+ name: eseries_storage_iscsi_target
+ ping: true
+ unnamed_discovery: false
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Make iSCSI target pingable (change)
+ na_santricity_iscsi_target:
+ <<: *creds
+ name: eseries_storage_iscsi_target
+ ping: true
+ unnamed_discovery: false
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Make iSCSI target pingable (no change)
+ na_santricity_iscsi_target:
+ <<: *creds
+ name: eseries_storage_iscsi_target
+ ping: true
+ unnamed_discovery: false
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Make iSCSI target discoverable (change)
+ na_santricity_iscsi_target:
+ <<: *creds
+ name: eseries_storage_iscsi_target
+ ping: true
+ unnamed_discovery: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ldap/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ldap/tasks/main.yml
new file mode 100644
index 000000000..b7b57df11
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_ldap/tasks/main.yml
@@ -0,0 +1,104 @@
+# Test code for the na_santricity_ldap module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- include_vars: "../../integration_config.yml"
+
+- set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ ldap_info: &info
+ bind_user: "{{ bind_user }}"
+ bind_password: "{{ bind_password }}"
+ server_url: "{{ server_url }}"
+ search_base: "{{ search_base }}"
+ role_mappings:
+ - ".*":
+ - storage.admin
+ - security.admin
+ - support.admin
+ - storage.monitor
+ - ".*":
+ - storage.monitor
+
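+# role_mappings[0] grants the full set of admin/monitor roles while role_mappings[1] grants only
+# storage.monitor; switching between them later is what forces a change on an existing domain.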
+- name: Delete default LDAP domain
+ na_santricity_ldap:
+ <<: *creds
+ state: disabled
+
+- name: (Repeat) Delete default LDAP domain (no change)
+ na_santricity_ldap:
+ <<: *creds
+ state: disabled
+ register: results
+- name: Verify LDAP changes
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Define a default LDAP domain, utilizing defaults where possible (changed, check_mode)
+ na_santricity_ldap:
+ <<: *creds
+ <<: *info
+ state: present
+ identifier: test1
+ role_mappings: "{{ role_mappings[0] }}"
+ check_mode: true
+ register: results
+- name: Verify LDAP changes
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Define a default LDAP domain, utilizing defaults where possible (changed)
+ na_santricity_ldap:
+ <<: *creds
+ <<: *info
+ state: present
+ identifier: test1
+ role_mappings: "{{ role_mappings[0] }}"
+ register: results
+- name: Verify LDAP changes
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Define a default LDAP domain, utilizing defaults where possible (no change)
+ na_santricity_ldap:
+ <<: *creds
+ <<: *info
+ state: present
+ identifier: test1
+ role_mappings: "{{ role_mappings[0] }}"
+ register: results
+- name: Verify LDAP changes
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Change the default LDAP domain's role mappings (change)
+ na_santricity_ldap:
+ <<: *creds
+ <<: *info
+ state: present
+ identifier: test1
+ role_mappings: "{{ role_mappings[1] }}"
+ register: results
+- name: Verify LDAP changes
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Delete default LDAP domain
+ na_santricity_ldap:
+ <<: *creds
+ state: absent
+ identifier: test1
+ register: results
+- name: Verify LDAP changes
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!" \ No newline at end of file
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_lun_mapping/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_lun_mapping/tasks/main.yml
new file mode 100644
index 000000000..37955fbd2
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_lun_mapping/tasks/main.yml
@@ -0,0 +1,318 @@
+# Test code for the na_santricity_lun_mapping module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_lun_mapping module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+# ****************************************************
+# *** Setup test hosts, storage pools, and volumes ***
+# ****************************************************
+- name: Create host for host mapping
+ na_santricity_host:
+ <<: *creds
+ state: present
+ name: test_host_mapping_host
+ host_type: 27
+- na_santricity_host:
+ <<: *creds
+ state: present
+ name: test_host1
+ host_type: 27
+- na_santricity_host:
+ <<: *creds
+ state: present
+ name: test_host2
+ host_type: 27
+- name: Create storage pool for host mapping
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: test_host_mapping_storage_pool
+ raid_level: raid0
+ criteria_min_usable_capacity: 1
+- name: Create volume for host mapping
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: test_host_mapping_volume
+ storage_pool_name: test_host_mapping_storage_pool
+ size: 1
+- name: Create volume for host mapping
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: test_host_mapping_volume2
+ storage_pool_name: test_host_mapping_storage_pool
+ size: 1
+
+# **********************************************
+# *** Create new lun between host and volume ***
+# **********************************************
+- name: Create na_santricity_lun_mapping
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume
+ register: result
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ item['mapped'] }}"
+ msg: "Lun failed to be created."
+ loop: "{{ lookup('list', current.json)}}"
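+# The xpath-filter query returns the volume object; its 'mapped' flag indicates whether a LUN
+# mapping currently exists for the volume.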
+
+# QUICK VERIFICATION OF MISMATCHING TARGET/TARGET_TYPE - GOOD
+#- name: Create na_santricity_lun_mapping
+# na_santricity_lun_mapping:
+# <<: *creds
+# state: present
+# target: test_host_mapping_host
+# volume: test_host_mapping_volume
+# lun: 100
+# target_type: group
+# register: result
+#
+#- pause: seconds=30
+# **************************************************************
+# *** Repeat previous lun creation play and verify unchanged ***
+# **************************************************************
+- name: Repeat lun creation
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume
+ register: result
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ item['mapped'] and result.changed==False }}"
+ msg: "Lun failed to be unchanged."
+ loop: "{{ lookup('list', current.json)}}"
+
+# ****************************************************************
+# *** Move existing lun to default target and verify unchanged ***
+# ****************************************************************
+- name: Move lun to default target
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ volume: test_host_mapping_volume
+ register: result
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ item['mapped'] }}"
+ msg: "Lun failed to be created."
+ loop: "{{ lookup('list', current.json)}}"
+
+# *****************************************************************
+# *** Move existing lun to specific target and verify unchanged ***
+# *****************************************************************
+- name: Move lun to specific target
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume
+ register: result
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ item['mapped'] }}"
+ msg: "Lun failed to be created."
+ loop: "{{ lookup('list', current.json)}}"
+
+# *******************************************
+# *** Modify a volume mapping's lun value ***
+# *******************************************
+- name: Change volume mapping's lun value
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume
+ lun: 100
+ register: result
+
+- pause: seconds=15
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ result.changed }}"
+ msg: "Lun failed to be unchanged."
+ loop: "{{ lookup('list', current.json)}}"
+
+- name: Verify mapping fails when lun already in use on existing host object
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume2
+ lun: 100
+ register: result
+ ignore_errors: True
+
+- pause: seconds=15
+
+- assert:
+ that: "{{ not result.changed }}"
+ msg: "Lun succeeded when it should have failed."
+ loop: "{{ lookup('list', current.json)}}"
+
+- name: Verify mapping succeeds when the same lun is used on multiple host objects.
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: present
+ target: test_host1
+ volume: test_host_mapping_volume2
+ lun: 100
+ register: result
+
+- pause: seconds=15
+
+- assert:
+ that: "{{ result.changed }}"
+ msg: "Lun failed to be unchanged."
+ loop: "{{ lookup('list', current.json)}}"
+
+# *************************************************************************************************
+# *** Verify that exact mapping details but different lun results in an unchanged configuration ***
+# *************************************************************************************************
+- name: Verify that exact mapping details but different lun results in an unchanged configuration
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: absent
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume
+ lun: 99
+ register: result
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ item['mapped'] and not result.changed }}"
+ msg: "Lun failed to be unchanged."
+ loop: "{{ lookup('list', current.json)}}"
+
+# ********************************
+# *** Delete newly created lun ***
+# ********************************
+- name: Delete lun creation
+ na_santricity_lun_mapping:
+ <<: *creds
+ state: absent
+ target: test_host_mapping_host
+ volume: test_host_mapping_volume
+ register: result
+
+- name: Verify lun mapping
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current
+
+- assert:
+ that: "{{ not item['mapped'] }}"
+ msg: "Lun failed to be created."
+ loop: "{{ lookup('list', current.json)}}"
+
+# ********************************************************
+# *** Tear down test hosts, storage pools, and volumes ***
+# ********************************************************
+- name: Delete volume for host mapping
+ na_santricity_volume:
+ <<: *creds
+ state: absent
+ name: test_host_mapping_volume
+ storage_pool_name: test_host_mapping_storage_pool
+ size: 1
+- name: Delete volume for host mapping
+ na_santricity_volume:
+ <<: *creds
+ state: absent
+ name: test_host_mapping_volume2
+ storage_pool_name: test_host_mapping_storage_pool
+ size: 1
+- name: Delete storage pool for host mapping
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: test_host_mapping_storage_pool
+ raid_level: raid0
+ criteria_min_usable_capacity: 1
+- name: Delete host for host mapping
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: test_host_mapping_host
+ host_type_index: 27
+- name: Delete host for host mapping
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: test_host2
+ host_type_index: 27
+- name: Delete host for host mapping
+ na_santricity_host:
+ <<: *creds
+ state: absent
+ name: test_host1
+ host_type_index: 27 \ No newline at end of file
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_mgmt_interface/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_mgmt_interface/tasks/main.yml
new file mode 100644
index 000000000..15aebf4f9
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_mgmt_interface/tasks/main.yml
@@ -0,0 +1,383 @@
+# Test code for the na_santricity_mgmt_interface module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# Note: Ensure controller B has IPv6 enabled, otherwise the first task will fail.
+
+- include_vars: "../../integration_config.yml"
+
+- set_fact:
+ controller_a: '070000000000000000000001'
+ controller_b: '070000000000000000000002'
+ original_channel_a1_info: &channel_a1_info
+ state: enabled
+ address: 10.113.1.192
+ subnet_mask: 255.255.255.0
+ gateway: 10.113.1.1
+ config_method: static
+ dns_config_method: static
+ dns_address: 10.193.0.250
+ dns_address_backup: 10.192.0.250
+ ntp_config_method: static
+ ntp_address: 216.239.35.0
+ ntp_address_backup: 216.239.35.4
+ ssh: true
+ original_channel_b1_info: &channel_b1_info
+ state: enabled
+ address: 10.113.1.193
+ subnet_mask: 255.255.255.0
+ gateway: 10.113.1.1
+ config_method: static
+ dns_config_method: static
+ dns_address: 10.193.0.250
+ dns_address_backup: 10.192.0.250
+ ntp_config_method: static
+ ntp_address: 216.239.35.0
+ ntp_address_backup: 216.239.35.4
+ ssh: true
+ address_info_list: &test_info
+ address: 10.113.1.251
+ subnet_mask: 255.255.255.0
+ gateway: 10.113.1.1
+
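+# The anchored blocks above capture the original static configuration for both controllers so it
+# can be merged back into the later restore tasks (<<: *channel_a1_info / *channel_b1_info), while
+# &test_info supplies a temporary static address used for controller B during the tests.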
+- name: Set controller A port 1 to dhcp
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ controller: A
+ port: "1"
+ config_method: dhcp
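+# Once controller A's port is on dhcp its original address may no longer be reachable, so the
+# follow-up requests substitute controller B's address into base_url via replace().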
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url.replace(original_channel_a1_info['address'], original_channel_b1_info['address']) }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller A port 1 is set to dhcp
+ assert:
+ that: "{{ (item['controllerRef'] != controller_a or item['channel'] != 1) or item['ipv4AddressConfigMethod'] == 'configDhcp' }}"
+ msg: "Failed to set controller A port 1 to dhcp!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Restore controller A port 1 to static
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url.replace(original_channel_a1_info['address'], original_channel_b1_info['address']) }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ controller: A
+ port: "1"
+ <<: *channel_a1_info
+
+- name: Disable controller B port 1
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "disabled"
+ port: "1"
+ controller: B
+
+- name: Set controller B port 1 to dhcp
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: dhcp
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 is set to dhcp
+ assert:
+ that: "{{ (item['controllerRef'] != controller_b or item['channel'] != 1) or item['ipv4AddressConfigMethod'] == 'configDhcp' }}"
+ msg: "Failed to set controller B port 1 to dhcp!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Set controller B port 1 to static ip address (changed, check_mode)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ check_mode: true
+ register: result
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 remains set to dhcp (check_mode)
+ assert:
+ that: "{{ result['changed'] and
+ ((item['controllerRef'] != controller_b or item['channel'] != 1) or
+ item['ipv4AddressConfigMethod'] == 'configDhcp') }}"
+ msg: "Failed to set controller B port 1 to static ip address!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Set controller B port 1 to static ip address (changed)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ register: result
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 is set to static ip address
+ assert:
+ that: "{{ result['changed'] and
+ ((item['controllerRef'] != controller_b or item['channel'] != 1) or
+ (item['ipv4AddressConfigMethod'] == 'configStatic' and
+ item['ipv4Address'] == address_info_list['address'] and
+ item['ipv4SubnetMask'] == address_info_list['subnet_mask'] and
+ item['ipv4GatewayAddress'] == address_info_list['gateway'])) }}"
+ msg: "Failed to set controller B port 1 to static ip address!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Set controller B port 1 dns setting to dhcp
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ dns_config_method: dhcp
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 dns setting is set to dhcp
+ assert:
+ that: "{{ ((item['controllerRef'] != controller_b or item['channel'] != 1) or
+ item['dnsProperties']['acquisitionProperties']['dnsAcquisitionType'] == 'dhcp') }}"
+ msg: "Failed to set controller B port 1 dns setting to dhcp!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Set controller B port 1 dns setting to static (changed)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ dns_config_method: static
+ dns_address: 192.168.1.1
+ dns_address_backup: 192.168.1.2
+ register: result
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 dns is set to static
+ assert:
+ that: "{{ result['changed'] and
+ ((item['controllerRef'] != controller_b or item['channel'] != 1) or
+ (item['dnsProperties']['acquisitionProperties']['dnsAcquisitionType'] == 'stat') and
+ item['dnsProperties']['acquisitionProperties']['dnsServers'][0]['addressType'] == 'ipv4' and
+ item['dnsProperties']['acquisitionProperties']['dnsServers'][0]['ipv4Address'] == '192.168.1.1' and
+ item['dnsProperties']['acquisitionProperties']['dnsServers'][1]['addressType'] == 'ipv4' and
+ item['dnsProperties']['acquisitionProperties']['dnsServers'][1]['ipv4Address'] == '192.168.1.2') }}"
+ msg: "Failed to set controller B port 1 dns setting to static!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Disable controller B port 1 ntp settings (changed)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ ntp_config_method: disabled
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 ntp is disabled
+ assert:
+ that: "{{ (item['controllerRef'] != controller_b or item['channel'] != 1) or
+ item['ntpProperties']['acquisitionProperties']['ntpAcquisitionType'] == 'disabled' }}"
+ msg: "Failed to disable controller B port 1 ntp!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Set controller B port 1 ntp setting to dhcp (changed)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ ntp_config_method: dhcp
+ register: result
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 ntp setting is set to dhcp
+ assert:
+ that: "{{ result['changed'] and
+ ((item['controllerRef'] != controller_b or item['channel'] != 1) or
+ item['ntpProperties']['acquisitionProperties']['ntpAcquisitionType'] == 'dhcp') }}"
+ msg: "Failed to set controller B port 1 ntp setting to dhcp!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Set controller B port 1 ntp setting to static (changed)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ state: "enabled"
+ port: "1"
+ controller: B
+ config_method: static
+ <<: *test_info
+ ntp_config_method: static
+ ntp_address: 192.168.1.1
+ ntp_address_backup: 192.168.1.2
+ register: result
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/configuration/ethernet-interfaces"
+ register: interfaces
+- name: Validate controller B port 1 ntp setting is set to static
+ assert:
+ that: "{{ result['changed'] and
+ ((item['controllerRef'] != controller_b or item['channel'] != 1) or
+ (item['ntpProperties']['acquisitionProperties']['ntpAcquisitionType'] == 'stat') and
+ item['ntpProperties']['acquisitionProperties']['ntpServers'][0]['addrType'] == 'ipvx' and
+ item['ntpProperties']['acquisitionProperties']['ntpServers'][0]['ipvxAddress']['addressType'] == 'ipv4' and
+ item['ntpProperties']['acquisitionProperties']['ntpServers'][0]['ipvxAddress']['ipv4Address'] == '192.168.1.1' and
+ item['ntpProperties']['acquisitionProperties']['ntpServers'][1]['addrType'] == 'ipvx' and
+ item['ntpProperties']['acquisitionProperties']['ntpServers'][1]['ipvxAddress']['addressType'] == 'ipv4' and
+ item['ntpProperties']['acquisitionProperties']['ntpServers'][1]['ipvxAddress']['ipv4Address'] == '192.168.1.2') }}"
+ msg: "Failed to set controller B port 1 ntp setting to static!"
+ loop: "{{ lookup('list', interfaces['json']) }}"
+
+- name: Disable controller B ssh
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ port: "1"
+ controller: B
+ ssh: false
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/controllers"
+ register: controllers
+- name: Validate controller B ssh is disabled
+ assert:
+ that: "{{ item['controllerRef'] != controller_b or not item['networkSettings']['remoteAccessEnabled'] }}"
+ msg: "Failed to disable controller B ssh!"
+ loop: "{{ lookup('list', controllers['json']) }}"
+
+- name: Enable controller B ssh (changed)
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ port: "1"
+ controller: B
+ ssh: true
+ register: result
+- name: Retrieve the current management interfaces
+ uri:
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ url: "{{ base_url }}storage-systems/{{ ssid }}/controllers"
+ register: controllers
+- name: Validate controller B ssh is enabled
+ assert:
+ that: "{{ result['changed'] and (item['controllerRef'] != controller_b or item['networkSettings']['remoteAccessEnabled']) }}"
+ msg: "Failed to set controller B port 1 ntp setting to static!"
+ loop: "{{ lookup('list', controllers['json']) }}"
+
+- name: Restore controller B port 1 settings
+ netapp_eseries.santricity.na_santricity_mgmt_interface:
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+ port: "1"
+ controller: B
+ <<: *channel_b1_info
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/ib.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/ib.yml
new file mode 100644
index 000000000..260f3d7ff
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/ib.yml
@@ -0,0 +1,88 @@
+# Test code for the na_santricity_nvme_interface module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_nvme_interface module test
+ set_fact:
+ credentials: &creds
+ ssid: 1
+ api_url: https://192.168.1.100:8443/devmgr/v2/
+ api_username: admin
+ api_password: adminpassword
+ validate_certs: false
+ interface_a1_ip: 192.168.1.1
+ interface_b1_ip: 192.168.2.1
+
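+# These credentials and interface addresses are hard-coded placeholders; adjust them (or
+# source them from integration_config.yml) to match the array under test.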
+- name: Set the initial nvme interfaces
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ loop:
+ - ["A", "1", "{{ interface_a1_ip }}"]
+ - ["B", "1", "{{ interface_b1_ip }}"]
+
+- name: Repeat the initial nvme interfaces (no change)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", "{{ interface_a1_ip }}"]
+ - ["B", "1", "{{ interface_b1_ip }}"]
+- name: Verify no changes were made
+ assert:
+ that: "{{ not item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}"
+
+- name: Change the initial nvme interfaces (changed, check_mode)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", "192.168.3.230"]
+ - ["B", "1", "192.168.3.231"]
+ check_mode: true
+- name: Verify changes were detected
+ assert:
+ that: "{{ item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}"
+
+- name: Change the initial nvme interfaces (changed)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", "192.168.3.230"]
+ - ["B", "1", "192.168.3.231"]
+- name: Verify changes were made
+ assert:
+ that: "{{ item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}"
+
+- name: Revert to the initial nvme interfaces (changed)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: "{{ item[0] }}"
+ channel: "{{ item[1] }}"
+ address: "{{ item[2] }}"
+ register: results
+ loop:
+ - ["A", "1", "{{ interface_a1_ip }}"]
+ - ["B", "1", "{{ interface_b1_ip }}"]
+- name: Verify changes were made
+ assert:
+ that: "{{ item['changed'] }}"
+ msg: "Unexpected results!"
+ loop: "{{ lookup('list', results['results']) }}" \ No newline at end of file
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/main.yml
new file mode 100644
index 000000000..82f5ba168
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/main.yml
@@ -0,0 +1,2 @@
+- include_tasks: ib.yml
+- include_tasks: roce.yml
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/roce.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/roce.yml
new file mode 100644
index 000000000..70bfe55d4
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_nvme_interface/tasks/roce.yml
@@ -0,0 +1,105 @@
+# Test code for the na_santricity_nvme_interface module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_nvme_interface module test
+ set_fact:
+ credentials: &creds
+ ssid: 1
+ api_url: https://192.168.1.100:8443/devmgr/v2/
+ api_username: admin
+ api_password: adminpassword
+ validate_certs: false
+ original_interface: &iface
+ address: 192.168.131.101
+ subnet_mask: 255.255.255.0
+ gateway: 0.0.0.0
+
+- name: Ensure NVMeoF interfaces are properly configured.
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: A
+ channel: 1
+ config_method: dhcp
+ mtu: 9000
+ speed: 25
+
+- name: Ensure NVMeoF interfaces are properly configured (no change).
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: A
+ channel: 1
+ config_method: dhcp
+ mtu: 9000
+ speed: 25
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Ensure NVMe interfaces are properly configured. (change, check_mode)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: A
+ channel: 1
+ config_method: static
+ address: 192.168.130.200
+ subnet_mask: 255.255.254.0
+ gateway: 192.168.130.1
+ mtu: 1500
+ speed: auto
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Ensure NVMe interfaces are properly configured. (change)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: A
+ channel: 1
+ config_method: static
+ address: 192.168.130.200
+ subnet_mask: 255.255.254.0
+ gateway: 192.168.130.1
+ mtu: 1500
+ speed: auto
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Ensure NVMe interfaces are properly configured. (no change)
+ na_santricity_nvme_interface:
+ <<: *creds
+ controller: A
+ channel: 1
+ config_method: static
+ address: 192.168.130.200
+ subnet_mask: 255.255.254.0
+ gateway: 192.168.130.1
+ mtu: 1500
+ speed: auto
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
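+# The task below merges both the credentials anchor and the original interface anchor
+# (address, subnet_mask, gateway) into the module arguments to restore the initial settings.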
+- name: Ensure NVMeoF interfaces are properly configured. (change)
+ na_santricity_nvme_interface:
+ <<: *creds
+ <<: *iface
+ controller: A
+ channel: 1
+ config_method: static
+ mtu: 1500
+ speed: auto
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_drive_firmware_upload/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_drive_firmware_upload/tasks/main.yml
new file mode 100644
index 000000000..c261abffa
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_drive_firmware_upload/tasks/main.yml
@@ -0,0 +1,65 @@
+# Test code for the na_santricity_proxy_drive_firmware_upload module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Test na_santricity_proxy_drive_firmware_upload module
+ set_fact:
+ credentials: &creds
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ firmware:
+ - /home/swartzn/Downloads/drive firmware/D_PX04SVQ160_30603182_MS00_5600_001.dlp
+ - /home/swartzn/Downloads/drive firmware/D_PX04SVQ160_30603299_MSB6_224C_705.dlp
+
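+# The firmware paths above are specific to the original test environment; point them at
+# locally available drive firmware (.dlp) files before running.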
+- name: Clear any existing proxy drive firmware
+ na_santricity_proxy_drive_firmware_upload:
+ <<: *creds
+
+- name: Clear any existing proxy drive firmware (no change)
+ na_santricity_proxy_drive_firmware_upload:
+ <<: *creds
+ register: results
+- name: Verify all drive firmware has been removed
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Drive firmware exists!"
+
+- name: Add drive firmware to proxy (changed, check_mode)
+ na_santricity_proxy_drive_firmware_upload:
+ <<: *creds
+ firmware: "{{ firmware }}"
+ register: results
+ check_mode: true
+- name: Verify drive firmware addition was detected
+  assert:
+    that: "{{ results['changed'] }}"
+    msg: "Failed to add drive firmware!"
+
+- name: Add drive firmware to proxy (changed)
+ na_santricity_proxy_drive_firmware_upload:
+ <<: *creds
+ firmware: "{{ firmware }}"
+ register: results
+- name: Verify drive firmware has been added
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Drive firmware exists!"
+
+- name: Remove drive firmware from proxy (changed)
+  na_santricity_proxy_drive_firmware_upload:
+    <<: *creds
+  register: results
+- name: Verify drive firmware has been removed
+  assert:
+    that: "{{ results['changed'] }}"
+    msg: "Failed to remove drive firmware!"
+
+- name: Remove drive firmware from proxy (no change)
+  na_santricity_proxy_drive_firmware_upload:
+    <<: *creds
+  register: results
+- name: Verify drive firmware remains removed
+  assert:
+    that: "{{ not results['changed'] }}"
+    msg: "Drive firmware exists!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_firmware_upload/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_firmware_upload/tasks/main.yml
new file mode 100644
index 000000000..d4b9f02dc
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_firmware_upload/tasks/main.yml
@@ -0,0 +1,65 @@
+# Test code for the na_santricity_proxy_firmware_upload module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Test na_santricity_proxy_firmware_upload module
+ set_fact:
+ credentials: &creds
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ firmware:
+ - /home/swartzn/Downloads/N5600-840834-D03.dlp
+ - /home/swartzn/Downloads/RC_08405000_m3_e10_840_5600.dlp
+
+- name: Clear any existing proxy firmware
+  na_santricity_proxy_firmware_upload:
+    <<: *creds
+
+- name: Clear any existing proxy firmware (no change)
+  na_santricity_proxy_firmware_upload:
+    <<: *creds
+  register: results
+- name: Verify all firmware has been removed
+  assert:
+    that: "{{ not results['changed'] }}"
+    msg: "Firmware exists!"
+
+- name: Add firmware to proxy (changed, check_mode)
+  na_santricity_proxy_firmware_upload:
+    <<: *creds
+    firmware: "{{ firmware }}"
+  register: results
+  check_mode: true
+- name: Verify firmware addition was detected
+  assert:
+    that: "{{ results['changed'] }}"
+    msg: "Failed to add firmware!"
+
+- name: Add firmware to proxy (changed)
+  na_santricity_proxy_firmware_upload:
+    <<: *creds
+    firmware: "{{ firmware }}"
+  register: results
+- name: Verify firmware has been added
+  assert:
+    that: "{{ results['changed'] }}"
+    msg: "Failed to add firmware!"
+
+- name: Remove firmware from proxy (changed)
+  na_santricity_proxy_firmware_upload:
+    <<: *creds
+  register: results
+- name: Verify firmware has been removed
+  assert:
+    that: "{{ results['changed'] }}"
+    msg: "Failed to remove firmware!"
+
+- name: Remove firmware from proxy (no change)
+  na_santricity_proxy_firmware_upload:
+    <<: *creds
+  register: results
+- name: Verify firmware remains removed
+  assert:
+    that: "{{ not results['changed'] }}"
+    msg: "Firmware exists!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_systems/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_systems/tasks/main.yml
new file mode 100644
index 000000000..1475cda99
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_proxy_systems/tasks/main.yml
@@ -0,0 +1,160 @@
+# Test code for the na_santricity_proxy_systems module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+
+# NOTE: Running this test back-to-back can result in a 10 minute lock-out
+
+- name: Test na_santricity_proxy_systems module
+ set_fact:
+ credentials: &creds
+ api_url: "{{ proxy_base_url }}"
+ api_username: "{{ proxy_username }}"
+ api_password: "{{ proxy_password }}"
+ validate_certs: "{{ proxy_validate_cert }}"
+ subnet: 192.168.1.10/24
+ small_subnet: 192.168.1.10/31 # Be sure to know the systems included in this subnet since they will be discovered and not specified.
+ systems:
+ - ssid: "10"
+ serial: "021633035190"
+ password: "password"
+ - ssid: "20"
+ serial: "711214000794"
+ password: "password"
+
+- name: Ensure no systems have been added.
+ na_santricity_proxy_systems:
+ <<: *creds
+
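+# The inline Jinja block in the next task builds a list of {"serial": ...} entries from the
+# systems fact, so each array is added by serial number using the shared password.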
+- name: Add multiple systems using serial numbers and a common password (change, check_mode)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ subnet }}"
+ password: "{{ systems[0]['password'] }}"
+ systems: |-
+ {%- set output=[] %}
+ {%- for system in systems %}
+ {%- if output.append({"serial": system["serial"]}) %}{%- endif %}
+ {%- endfor %}
+ {{ output }}
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Add multiple systems using serial numbers and a common password (change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ subnet }}"
+ password: "{{ systems[0]['password'] }}"
+ systems: |-
+ {%- set output=[] %}
+ {%- for system in systems %}
+ {%- if output.append({"serial": system["serial"]}) %}{%- endif %}
+ {%- endfor %}
+ {{ output }}
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Add multiple systems using serial numbers and a common password (no change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ subnet }}"
+ password: "{{ systems[0]['password'] }}"
+ systems: |-
+ {%- set output=[] %}
+ {%- for system in systems %}
+ {%- if output.append({"serial": system["serial"]}) %}{%- endif %}
+ {%- endfor %}
+ {{ output }}
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Remove all systems. (change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add multiple systems using serial numbers (change, check_mode)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ subnet }}"
+ systems: "{{ systems }}"
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add multiple systems using serial numbers (change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ subnet }}"
+ systems: "{{ systems }}"
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add multiple systems using serial numbers (no change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ subnet }}"
+ systems: "{{ systems }}"
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Remove all systems. (change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add any other available system on the subnet (change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ subnet_mask: "{{ small_subnet }}"
+ add_discovered_systems: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Remove all systems. (change, check_mode)
+ na_santricity_proxy_systems:
+ <<: *creds
+ register: results
+ check_mode: true
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Remove all systems. (change)
+ na_santricity_proxy_systems:
+ <<: *creds
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_storagepool/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_storagepool/tasks/main.yml
new file mode 100644
index 000000000..664df5951
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_storagepool/tasks/main.yml
@@ -0,0 +1,1038 @@
+# Test code for the na_santricity_storagepool module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+#
+# Raid levels tested: raid0, raid1, raid5, raid6, disk pool
+# Actions covered: create w/capacity, create w/drive count, repeat create (no changes), extend w/capacity,
+# extend w/drive count, delete, migrate raid levels (raid0->raid6, 1->5, 5->1, 6->0),
+# secure pool for raid0, erasing drives on creation, erasing drives on deletion,
+# setting reserve drive count for ddp,
+
+- name: Set facts for na_santricity_storagepool module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+# Ensure that test starts without storage pools
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ erase_secured_drives: yes
+ name: "{{ item }}"
+ loop:
+ - raid0_storage
+ - raid1_storage
+ - raid5_storage
+ - raid6_storage
+ - raidDiskPool_storage
+
+# Raid0
+# Create, rerun, extend, and modify raid level.
+- name: Create simple storage pool using raid0.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_min_usable_capacity: 1400
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 1503238553600 }}"
+ msg: "raid0 storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid0_storage`]') }}"
+
+- name: (Repeat) Create simple storage pool using raid0.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_min_usable_capacity: 1400
+ criteria_size_unit: gb
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was not modified
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ not results.changed and item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 1503238553600 }}"
+ msg: "raid0 storage pool failed not to be modified."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid0_storage`]') }}"
+
+- name: Extend storage pool to 2400gb minimum usable capacity.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_min_usable_capacity: 2400
+ criteria_size_unit: gb
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 2576980377600 }}"
+ msg: "raid0 storage pool using raid0 failed to be extended to a minimum of 2400gb."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid0_storage`]') }}"
+
+- name: Expand simple storage pool using raid0.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_drive_count: 6
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
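+# count_query (defined in vars below) selects the drives whose currentVolumeGroupRef matches
+# the pool returned by the module, so its length equals the pool's current drive count.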
+- assert:
+ that: "{{ results.raidLevel == 'raid0' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid0 storage pool failed to be extended to 6 drives."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Migrate raid0 storage pool to raid6.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_drive_count: 6
+ raid_level: raid6
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was migrated
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid6' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid0 storage pool failed to migrate to raid6"
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - raid0_storage
+
+
+# Raid1
+# Create, rerun, extend, and modify raid level.
+- name: Create simple storage pool using raid1.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid1_storage
+ criteria_min_usable_capacity: 1400
+ criteria_size_unit: gb
+ raid_level: raid1
+ register: results
+- pause: seconds=5
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid1' and (item.totalRaidedSpace | int) >= 1503238553600 }}"
+ msg: "raid1 storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid1_storage`]') }}"
+
+- name: (Repeat) Create simple storage pool using raid1.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid1_storage
+ criteria_min_usable_capacity: 1400
+ criteria_size_unit: gb
+ raid_level: raid1
+ register: results
+- pause: seconds=5
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ not results.changed and item.raidLevel == 'raid1' and (item.totalRaidedSpace | int) >= 1503238553600 }}"
+ msg: "raid1 storage pool failed not to be modified."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid1_storage`]') }}"
+
+- name: Expand simple storage pool using raid1.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid1_storage
+ criteria_drive_count: 6
+ raid_level: raid1
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid1' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid1 storage pool failed to be extended."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Migrate raid1 storage pool to raid5
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid1_storage
+ criteria_drive_count: 6
+ raid_level: raid5
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was migrated
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid5' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid1 storage pool failed to migrate to raid5."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - raid1_storage
+
+
+# Raid5
+# Create, rerun, extend, and modify raid level.
+- name: Create simple storage pool using raid5.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid5_storage
+ criteria_drive_count: 6
+ raid_level: raid5
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid5' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid5 storage pool failed to be created."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: (Rerun) Create simple storage pool using raid5.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid5_storage
+ criteria_drive_count: 6
+ raid_level: raid5
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was not modified
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ not results.changed and results.raidLevel == 'raid5' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid5 storage pool failed not to be modified."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Expand simple storage pool using raid5.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid5_storage
+ criteria_drive_count: 8
+ raid_level: raid5
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid5' and
+ (current_drives.json | json_query(count_query) | length) == 8}}"
+ msg: "raid5 storage pool failed to be modified to 8 drives."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Migrate raid5 storage pool to raid1
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid5_storage
+ criteria_drive_count: 8
+ raid_level: raid1
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was migrated
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid1' and
+ (current_drives.json | json_query(count_query) | length) == 8}}"
+ msg: "raid5 storage pool failed to migrate to raid1."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - raid5_storage
+
+
+# raid6
+# Create, rerun, extend, and modify raid level.
+- name: Create simple storage pool using raid6.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid6_storage
+ criteria_drive_count: 5
+ raid_level: raid6
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid6' and
+ (current_drives.json | json_query(count_query) | length) == 5}}"
+ msg: "raid6 storage pool failed to be created with 5 drives."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Extend simple storage pool using raid6.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid6_storage
+ criteria_min_usable_capacity: 3.4
+ criteria_size_unit: tb
+ raid_level: raid6
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid6' and (item.totalRaidedSpace | int) >= 3738339534438 }}"
+ msg: "raid6 storage pool failed to be extended to a minimum of 3.4tb."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid6_storage`]') }}"
+
+- name: Migrate raid6 storage pool to raid0
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid6_storage
+ criteria_min_usable_capacity: 3.4
+ criteria_size_unit: tb
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was migrated
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 3738339534438 }}"
+ msg: "raid6 storage pool failed to migrate to raid0."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid6_storage`]') }}"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - raid6_storage
+
+# raidDiskPool
+# Create, rerun, extend, and modify raid level.
+- name: Create simple storage pool using raidDiskPool.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 2300
+ criteria_size_unit: gb
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and (item.totalRaidedSpace | int) >= 2469606195200 }}"
+ msg: "Simple storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Rerun simple storage pool creation.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 2300
+ criteria_size_unit: gb
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was not modified
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ not results.changed and item.raidLevel == 'raidDiskPool' and (item.totalRaidedSpace | int) >= 2469606195200 }}"
+ msg: "Simple storage pool failed not to be modified."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Extend simple storage pool to a minimum usable capacity of 3000gb
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 3000
+ criteria_size_unit: gb
+ raid_level: raidDiskPool
+ register: results
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and (item.totalRaidedSpace | int) >= 3221225472000 }}"
+ msg: "Simple storage pool failed to be extended."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Extend simple storage pool.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_drive_count: 12
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raidDiskPool' and
+ (current_drives.json | json_query(count_query) | length) == 12}}"
+ msg: "raidDiskPool storage pool failed to be extended with 12 drives."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: raidDiskPool_storage
+ register: results
+
+
+# raid0 secured
+- name: Create simple storage pool using raid0.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_min_usable_capacity: 1400
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 1503238553600 and
+ item.securityType == 'enabled' }}"
+ msg: "raid0 storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid0_storage`]') }}"
+
+- name: (Repeat) Create simple storage pool using raid0.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_min_usable_capacity: 1400
+ criteria_size_unit: gb
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was not modified
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ not results.changed and item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 1503238553600 and
+ item.securityType == 'enabled' }}"
+ msg: "raid0 storage pool failed not to be modified."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid0_storage`]') }}"
+
+- name: Extend storage pool to 2400gb minimum usable capacity.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_min_usable_capacity: 2400
+ criteria_size_unit: gb
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raid0' and (item.totalRaidedSpace | int) >= 2576980377600 and
+ item.securityType == 'enabled' }}"
+ msg: "raid0 storage pool using raid0 failed to be extended to a minimum of 2400gb."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raid0_storage`]') }}"
+
+- name: Expand simple storage pool using raid0.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_drive_count: 6
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raid0
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid0' and results.securityType == 'enabled' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid0 storage pool failed to be extended to 6 drives."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Migrate raid0 storage pool to raid6.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raid0_storage
+ criteria_drive_count: 6
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raid6
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was migrated
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raid6' and results.securityType == 'enabled' and
+ (current_drives.json | json_query(count_query) | length) == 6 }}"
+ msg: "raid0 storage pool failed to migrate to raid6"
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ erase_secured_drives: yes
+ loop:
+ - raid0_storage
+
+
+# raidDiskPool secured
+- name: Create simple storage pool using raidDiskPool.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 2300
+ criteria_size_unit: gb
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and (item.totalRaidedSpace | int) >= 2469606195200 and
+ item.securityType == 'enabled' }}"
+ msg: "Simple storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Rerun simple storage pool creation.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 2300
+ criteria_size_unit: gb
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was not modified
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ not results.changed and item.raidLevel == 'raidDiskPool' and (item.totalRaidedSpace | int) >= 2469606195200 and
+ item.securityType == 'enabled' }}"
+ msg: "Simple storage pool failed not to be modified."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Extend simple storage pool to a minimum usable capacity of 3000gb
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 3000
+ criteria_size_unit: gb
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and (item.totalRaidedSpace | int) >= 3221225472000 and
+ item.securityType == 'enabled' }}"
+ msg: "Simple storage pool failed to be extended."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Extend simple storage pool.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_drive_count: 12
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was extended
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/drives"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_drives
+- assert:
+ that: "{{ results.raidLevel == 'raidDiskPool' and results.securityType == 'enabled' and
+ (current_drives.json | json_query(count_query) | length) == 12 }}"
+ msg: "raidDiskPool storage pool failed to be extended with 12 drives."
+ vars:
+ count_query: "[?currentVolumeGroupRef=='{{ results.volumeGroupRef }}'].currentVolumeGroupRef"
+
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: raidDiskPool_storage
+ register: results
+
+
+# raidDiskPool set reserve drive count
+- name: Create simple storage pool using raidDiskPool.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_drive_count: 11
+ reserve_drive_count: 1
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and
+ item.volumeGroupData.diskPoolData.reconstructionReservedDriveCount == 1 and
+ item.securityType == 'enabled' }}"
+ msg: "Simple storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Change disk pool reserve drive count.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_drive_count: 12
+ reserve_drive_count: 2
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=30
+- name: Verify storage pool reserve drive count was updated
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and
+ item.volumeGroupData.diskPoolData.reconstructionReservedDriveCount == 2 and
+ item.securityType == 'enabled' }}"
+    msg: "Simple storage pool reserve drive count failed to be updated."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
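+
+# reserve_drive_count maps onto the disk pool's reconstructionReservedDriveCount field, which is
+# why the assertions above read item.volumeGroupData.diskPoolData.reconstructionReservedDriveCount.
+# A minimal sketch of that lookup (illustrative only; example_pool is a placeholder record):
+#
+# - name: Illustrate reading a pool's reserved drive count
+#   assert:
+#     that: "{{ example_pool.volumeGroupData.diskPoolData.reconstructionReservedDriveCount == 2 }}"
+#   vars:
+#     example_pool:
+#       volumeGroupData:
+#         diskPoolData:
+#           reconstructionReservedDriveCount: 2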
+
+# erase drives on storage pool deletion
+- name: Remove simple storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: raidDiskPool_storage
+ erase_secured_drives: yes
+ register: results
+
+- name: Create simple storage pool using raidDiskPool with capacity and reserve count specified.
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: raidDiskPool_storage
+ criteria_min_usable_capacity: 8000
+ criteria_size_unit: gb
+ reserve_drive_count: 2
+ secure_pool: yes
+ erase_secured_drives: yes
+ raid_level: raidDiskPool
+ register: results
+- pause: seconds=5
+- name: Verify storage pool was created
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/storage-pools"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ body_format: json
+ validate_certs: no
+ register: current_storage_pools
+- assert:
+ that: "{{ item.raidLevel == 'raidDiskPool' and
+ (item.totalRaidedSpace | int) >= 3221225472000 and
+ item.volumeGroupData.diskPoolData.reconstructionReservedDriveCount == 2 and
+ item.securityType == 'enabled' }}"
+ msg: "Simple storage pool failed to be created."
+ loop: "{{ lookup('list', storage_pools, wantList=True) }}"
+ vars:
+ storage_pools: "{{ current_storage_pools | json_query('json[?name==`raidDiskPool_storage`]') }}"
+
+- name: Integration cleanup
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: raidDiskPool_storage
+ erase_secured_drives: yes
+ register: results
+- na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: raidDiskPool_storage
+ register: results
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_syslog/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_syslog/tasks/main.yml
new file mode 100644
index 000000000..79830c3d6
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_syslog/tasks/main.yml
@@ -0,0 +1,127 @@
+# Test code for the na_santricity_syslog module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_syslog module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+- name: Add initial syslog server settings (changed)
+ na_santricity_syslog:
+ <<: *creds
+ address: 192.168.1.100
+ port: 514
+ protocol: udp
+ components: ["auditLog"]
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: (Repeat) Configure initial syslog server settings (no change)
+ na_santricity_syslog:
+ <<: *creds
+ address: 192.168.1.100
+ port: 514
+ protocol: udp
+ components: ["auditLog"]
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ not results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add another syslog server entry with a different protocol (changed)
+ na_santricity_syslog:
+ <<: *creds
+ address: 192.168.1.100
+ port: 514
+ protocol: tcp
+ components: ["auditLog"]
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add another syslog server entry with a different port (changed)
+ na_santricity_syslog:
+ <<: *creds
+ address: 192.168.1.100
+ port: 123
+ protocol: tcp
+ components: ["auditLog"]
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Add another syslog server address (change, check_mode)
+ na_santricity_syslog:
+ <<: *creds
+ address: 192.168.1.200
+ port: 514
+ protocol: tcp
+ components: ["auditLog"]
+ check_mode: true
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
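+# check_mode: true above runs the task in Ansible's dry-run mode: the module only reports whether
+# a change would be made, without applying it, which is why the repeat task below performs the
+# same change for real and is still expected to report changed. A minimal sketch of the general
+# pattern with this module (illustrative only, not part of the test flow):
+#
+# - name: Dry-run the syslog change first
+#   na_santricity_syslog:
+#     <<: *creds
+#     address: 192.168.1.200
+#     port: 514
+#     protocol: tcp
+#     components: ["auditLog"]
+#   check_mode: true
+#   register: dry_run
+# - name: Apply the change only if the dry run reported one
+#   na_santricity_syslog:
+#     <<: *creds
+#     address: 192.168.1.200
+#     port: 514
+#     protocol: tcp
+#     components: ["auditLog"]
+#   when: dry_run.changed
+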
+- name: (Repeat) Add another syslog server address (change)
+ na_santricity_syslog:
+ <<: *creds
+ address: 192.168.1.200
+ port: 514
+ protocol: tcp
+ components: ["auditLog"]
+ register: results
+- name: Verify results
+ assert:
+ that: "{{ results['changed'] }}"
+ msg: "Unexpected results!"
+
+- name: Disable syslog server (change)
+ na_santricity_syslog:
+ <<: *creds
+ state: absent
+ address: 192.168.1.100
+ port: 514
+ protocol: udp
+ components: ["auditLog"]
+
+- name: Disable syslog server (change)
+ na_santricity_syslog:
+ <<: *creds
+ state: absent
+ address: 192.168.1.100
+ port: 514
+ protocol: tcp
+ components: ["auditLog"]
+
+- name: Disable syslog server (change)
+ na_santricity_syslog:
+ <<: *creds
+ state: absent
+ address: 192.168.1.100
+ port: 123
+ protocol: tcp
+ components: ["auditLog"]
+
+- name: Disable syslog server (change)
+ na_santricity_syslog:
+ <<: *creds
+ state: absent
+ address: 192.168.1.200
+ port: 1514
+ protocol: tcp
+ components: ["auditLog"]
diff --git a/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_volume/tasks/main.yml b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_volume/tasks/main.yml
new file mode 100644
index 000000000..fe6d91d35
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/integration/targets/na_santricity_volume/tasks/main.yml
@@ -0,0 +1,768 @@
+# Test code for the na_santricity_volume module
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+- name: Set facts for na_santricity_volume module's integration test.
+ set_fact:
+ credentials: &creds
+ ssid: "{{ ssid }}"
+ api_url: "{{ base_url }}"
+ api_username: "{{ username }}"
+ api_password: "{{ password }}"
+ validate_certs: "{{ validate_cert }}"
+
+# test setup
+- name: Delete any existing storage pools
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - storage_pool
+ - storage_pool2
+ - storage_pool3
+
+# Thick volume testing: create, delete, expand, change properties (read/write cache), and expand while changing properties
+- name: Create raid 0 storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: storage_pool
+ criteria_min_usable_capacity: 5
+ criteria_size_unit: tb
+ erase_secured_drives: yes
+ raid_level: raid0
+
+- name: Delete volume in raid 0 storage pool
+ na_santricity_volume:
+ <<: *creds
+ state: absent
+ name: volume
+
+- name: Create volume in raid 0 storage pool
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 100
+ size_unit: gb
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '107374182400' and item.segmentSize == 131072}}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
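+# The capacity assertions compare against quoted values because the volumes endpoint returns
+# capacity as a string of bytes (100 gb -> 100 * 1024^3 = '107374182400'), while segmentSize
+# (128 KiB = 131072) comes back as a plain integer. A minimal sketch of the comparison
+# (illustrative only; example_volume is a placeholder, not data from the array):
+#
+# - name: Illustrate the capacity and segment size comparisons
+#   assert:
+#     that: "{{ example_volume.capacity == '107374182400' and example_volume.segmentSize == 131072 }}"
+#   vars:
+#     example_volume: { capacity: "107374182400", segmentSize: 131072 }
+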
+- name: Re-execute volume creation in raid 0 storage pool
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 100
+ size_unit: gb
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ not results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '107374182400' and item.segmentSize == 131072}}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+- name: Update volume size
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 200
+ size_unit: gb
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '214748364800' and item.segmentSize == 131072}}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+- pause: seconds=15
+
+- name: Update volume properties
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 200
+ size_unit: gb
+ write_cache_enable: true
+ read_cache_enable: false
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '214748364800' and item.segmentSize == 131072 and
+ not item.cacheSettings.readCacheEnable and item.cacheSettings.writeCacheEnable}}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+- name: Update volume properties and expand storage capabilities
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 300
+ size_unit: gb
+ write_cache_enable: false
+ read_cache_enable: true
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '322122547200' and item.segmentSize == 131072 and
+ item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable}}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+# Workload tagging testing: create, utilize existing (name only, name with same attributes), modify attributes
+- name: Add workload tag (change, new workload tag)
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 300
+ size_unit: gb
+ write_cache_enable: false
+ read_cache_enable: true
+ workload_name: volume_tag
+ metadata:
+ volume_tag_key: volume_tag_value
+ register: results
+- pause: seconds=15
+- name: Validate volume workload changes
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '322122547200' and item.segmentSize == 131072 and
+ item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
+ {'key': 'volumeTypeId', 'value': 'volume'} in item.metadata }}"
+ msg: "Failed to modify volume metadata!"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: workload_tags
+- assert:
+ that: "{{ item.name == 'volume_tag' and
+ {'key': 'volume_tag_key', 'value': 'volume_tag_value'} in item.workloadAttributes }}"
+ msg: "Workload tag failed to be created!"
+ loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
+ vars:
+ volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
+
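+# The two assertions above check different records: the volume itself gains a metadata entry of
+# {'key': 'volumeTypeId', 'value': 'volume'}, while the named workload carries the user-supplied
+# metadata as key/value pairs in workloadAttributes. A minimal sketch of the attribute check
+# (illustrative only; example_workload is a placeholder):
+#
+# - name: Illustrate matching a workload attribute pair
+#   assert:
+#     that: "{{ {'key': 'volume_tag_key', 'value': 'volume_tag_value'} in example_workload.workloadAttributes }}"
+#   vars:
+#     example_workload:
+#       name: volume_tag
+#       workloadAttributes:
+#         - { key: volume_tag_key, value: volume_tag_value }
+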
+- name: Repeat add workload tag (no change)
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 300
+ size_unit: gb
+ write_cache_enable: false
+ read_cache_enable: true
+ workload_name: volume_tag
+ metadata:
+ volume_tag_key: volume_tag_value
+ register: results
+- pause: seconds=15
+- name: Validate volume workload changes
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ not results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '322122547200' and item.segmentSize == 131072 and
+ item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
+ {'key': 'volumeTypeId', 'value': 'volume'} in item.metadata }}"
+    msg: "Volume metadata was unexpectedly modified!"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: workload_tags
+- assert:
+ that: "{{ item.name == 'volume_tag' and
+ {'key': 'volume_tag_key', 'value': 'volume_tag_value'} in item.workloadAttributes }}"
+    msg: "Workload tag was unexpectedly changed!"
+ loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
+ vars:
+ volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
+
+- name: Workload tag (no change, just using workload_name)
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 300
+ size_unit: gb
+ write_cache_enable: false
+ read_cache_enable: true
+ workload_name: volume_tag
+ register: results
+- pause: seconds=15
+- name: Validate volume workload changes
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ not results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '322122547200' and item.segmentSize == 131072 and
+ item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
+ {'key': 'volumeTypeId', 'value': 'volume'} in item.metadata }}"
+    msg: "Volume metadata was unexpectedly modified!"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: workload_tags
+- assert:
+ that: "{{ item.name == 'volume_tag' and
+ {'key': 'volume_tag_key', 'value': 'volume_tag_value'} in item.workloadAttributes }}"
+    msg: "Workload tag was unexpectedly modified!"
+ loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
+ vars:
+ volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
+
+- name: Add workload tag (change, new attributes)
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 300
+ size_unit: gb
+ write_cache_enable: false
+ read_cache_enable: true
+ workload_name: volume_tag
+ metadata:
+ volume_tag_key2: volume_tag_value2
+ register: results
+- pause: seconds=15
+- name: Validate volume workload changes
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '322122547200' and item.segmentSize == 131072 and
+ item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
+ {'key': 'volumeTypeId', 'value': 'volume'} in item.metadata }}"
+    msg: "Failed to update volume workload metadata!"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: workload_tags
+- assert:
+ that: "{{ item.name == 'volume_tag' and
+ {'key': 'volume_tag_key2', 'value': 'volume_tag_value2'} in item.workloadAttributes }}"
+ msg: "Workload tag failed to be updated!"
+ loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
+ vars:
+ volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
+
+- name: Remove workload tag from volume (change)
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool
+ size: 300
+ size_unit: gb
+ write_cache_enable: false
+ read_cache_enable: true
+ register: results
+- pause: seconds=15
+- name: Validate volume workload changes
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '322122547200' and item.segmentSize == 131072 and
+ item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
+ item.metadata == []}}"
+    msg: "Failed to remove volume workload metadata!"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: workload_tags
+- assert:
+ that: "{{ item.name == 'volume_tag' and
+ {'key': 'volume_tag_key2', 'value': 'volume_tag_value2'} in item.workloadAttributes }}"
+ msg: "Workload tag failed to be updated!"
+ loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
+ vars:
+ volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
+
+- name: Delete workload tag
+ uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: workload_tags
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/workloads/{{ item }}"
+ method: DELETE
+ status_code: 204
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
+ vars:
+ volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`].id') }}"
+
+- name: Delete raid 0 storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: storage_pool
+
+
+# *** Thin volume testing (may not work with the simulator) ***
+- name: Create dynamic disk pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: storage_pool
+ criteria_min_usable_capacity: 2
+ criteria_size_unit: tb
+
+- name: Create thin volume
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume
+ storage_pool_name: storage_pool
+ size: 131072
+ size_unit: gb
+ thin_provision: true
+ thin_volume_repo_size: 32
+ thin_volume_max_repo_size: 1024
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'thin_volume' and item.thinProvisioned and
+ item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
+ item.provisionedCapacityQuota == '1099511627776' and item.expansionPolicy == 'automatic' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume`]') }}"
+
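+# The thin-volume figures asserted above map onto the module options: capacity is the virtual size
+# (131072 gb = 140737488355328 bytes), initialProvisionedCapacity matches thin_volume_repo_size
+# (32 gb = 34359738368 bytes), and provisionedCapacityQuota matches thin_volume_max_repo_size
+# (1024 gb = 1099511627776 bytes). A small arithmetic sketch (illustrative only):
+#
+# - name: Illustrate the thin-volume byte arithmetic
+#   assert:
+#     that:
+#       - "{{ (131072 * 1024 * 1024 * 1024) == 140737488355328 }}"
+#       - "{{ (32 * 1024 * 1024 * 1024) == 34359738368 }}"
+#       - "{{ (1024 * 1024 * 1024 * 1024) == 1099511627776 }}"
+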
+- name: (Rerun) Create thin volume
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume
+ storage_pool_name: storage_pool
+ size: 131072
+ size_unit: gb
+ thin_provision: true
+ thin_volume_repo_size: 32
+ thin_volume_max_repo_size: 1024
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ not results.changed and item.name == 'thin_volume' and item.thinProvisioned and
+ item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
+ item.provisionedCapacityQuota == '1099511627776' and item.expansionPolicy == 'automatic' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume`]') }}"
+
+
+- name: Expand thin volume's virtual size
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume
+ storage_pool_name: storage_pool
+ size: 262144
+ size_unit: gb
+ thin_provision: true
+ thin_volume_repo_size: 32
+ thin_volume_max_repo_size: 1024
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'thin_volume' and item.thinProvisioned and
+ item.capacity == '281474976710656' and item.initialProvisionedCapacity == '34359738368' and
+ item.provisionedCapacityQuota == '1099511627776' and item.expansionPolicy == 'automatic' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume`]') }}"
+
+
+- name: Expand thin volume's maximum repository size
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume
+ storage_pool_name: storage_pool
+ size: 262144
+ size_unit: gb
+ thin_provision: true
+ thin_volume_repo_size: 32
+ thin_volume_max_repo_size: 2048
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'thin_volume' and item.thinProvisioned and
+ item.capacity == '281474976710656' and item.initialProvisionedCapacity == '34359738368' and
+ item.provisionedCapacityQuota == '2199023255552' and item.expansionPolicy == 'automatic' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume`]') }}"
+
+- name: Create dynamic disk pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: storage_pool2
+ criteria_min_usable_capacity: 2
+ criteria_size_unit: tb
+- pause: seconds=15
+
+- name: Create second thin volume with manual expansion policy
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume2
+ storage_pool_name: storage_pool2
+ size_unit: gb
+ size: 131072
+ thin_provision: true
+ thin_volume_repo_size: 32
+ thin_volume_max_repo_size: 32
+ thin_volume_expansion_policy: manual
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'thin_volume2' and item.thinProvisioned and
+ item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
+ item.currentProvisionedCapacity == '34359738368' and item.expansionPolicy == 'manual' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume2`]') }}"
+
+
+- name: Expand second thin volume's repository (manual expansion policy)
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume2
+ storage_pool_name: storage_pool2
+ size_unit: gb
+ size: 131072
+ thin_provision: true
+ thin_volume_repo_size: 288
+ thin_volume_max_repo_size: 288
+ thin_volume_expansion_policy: manual
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'thin_volume2' and item.thinProvisioned and
+ item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
+ item.currentProvisionedCapacity == '309237645312' and item.expansionPolicy == 'manual' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume2`]') }}"
+
+- name: Modify second thin volume to use automatic expansion policy
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: thin_volume2
+ storage_pool_name: storage_pool2
+ size_unit: gb
+ size: 131072
+ thin_provision: true
+ thin_volume_repo_size: 288
+ thin_volume_max_repo_size: 288
+ thin_volume_expansion_policy: automatic
+ register: results
+- pause: seconds=15
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/thin-volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'thin_volume2' and item.thinProvisioned and
+ item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
+ item.currentProvisionedCapacity == '309237645312' and item.expansionPolicy == 'automatic' }}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`thin_volume2`]') }}"
+
+- name: Delete dynamic disk pools
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - storage_pool
+ - storage_pool2
+
+- name: Create raid 0 storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: storage_pool
+ criteria_min_usable_capacity: 5
+ criteria_size_unit: tb
+ erase_secured_drives: yes
+ raid_level: raid0
+
+# Thick volume expansion testing: wait and don't wait for operation to complete
+- name: Create raid 6 storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: present
+ name: storage_pool3
+ criteria_min_usable_capacity: 5
+ criteria_size_unit: tb
+ erase_secured_drives: yes
+ raid_level: raid6
+
+- name: Delete volume in raid 6 storage pool
+ na_santricity_volume:
+ <<: *creds
+ state: absent
+ name: volume
+
+- name: Create volume in raid 6 storage pool for expansion testing
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool3
+ size: 1
+ size_unit: gb
+ register: results
+- pause: seconds=10
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '1073741824' and item.segmentSize == 131072}}"
+ msg: "Failed to create volume"
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+- name: Modify volume in raid 6 storage pool and wait for expansion testing
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool3
+ size: 10
+ size_unit: gb
+ wait_for_initialization: True
+ register: results
+- pause: seconds=10
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes/{{ volume[0]['id'] }}/expand"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: expansion_state
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '10737418240' and item.segmentSize == 131072 and
+ expansion_state['json']['action'] == 'none'}}"
+ msg: "Volume expansion test failed."
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
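+# The extra uri call against volumes/{id}/expand reads the state of the capacity expansion: with
+# wait_for_initialization set, the test expects action to already be 'none' (finished) by the time
+# the module returns, while the non-waiting case below expects it to still be in progress. A
+# minimal sketch of polling that endpoint until it settles (illustrative only; EXAMPLE_VOLUME_ID
+# and the retry counts are placeholders):
+#
+# - name: Illustrate waiting for a volume expansion to finish
+#   uri:
+#     url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes/EXAMPLE_VOLUME_ID/expand"
+#     user: "{{ credentials.api_username }}"
+#     password: "{{ credentials.api_password }}"
+#     validate_certs: no
+#   register: example_expansion
+#   until: example_expansion.json.action == 'none'
+#   retries: 30
+#   delay: 10
+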
+- name: Modify volume in raid 6 storage pool and don't wait for expansion testing
+ na_santricity_volume:
+ <<: *creds
+ state: present
+ name: volume
+ storage_pool_name: storage_pool3
+ size: 100
+ size_unit: gb
+ wait_for_initialization: False
+ register: results
+- pause: seconds=10
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: current
+- uri:
+ url: "{{ credentials.api_url }}storage-systems/{{ credentials.ssid }}/volumes/{{ volume[0]['id'] }}/expand"
+ user: "{{ credentials.api_username }}"
+ password: "{{ credentials.api_password }}"
+ validate_certs: no
+ register: expansion_state
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+- assert:
+ that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
+ item.capacity == '107374182400' and item.segmentSize == 131072 and expansion_state['json']['action'] != 'none'}}"
+    msg: "Volume expansion test failed."
+ loop: "{{ lookup('list', volume, wantList=True) }}"
+ vars:
+ volume: "{{ current | json_query('json[?name==`volume`]') }}"
+
+- name: Delete raid 6 storage pool
+ na_santricity_storagepool:
+ <<: *creds
+ state: absent
+ name: "{{ item }}"
+ loop:
+ - storage_pool3 \ No newline at end of file
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_alerts.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_alerts.py
new file mode 100644
index 000000000..3510e5107
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_alerts.py
@@ -0,0 +1,194 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_alerts import NetAppESeriesAlerts
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class AlertsTest(ModuleTestCase):
+ REQUIRED_PARAMS = {
+ 'api_username': 'rw',
+ 'api_password': 'password',
+ 'api_url': 'http://localhost',
+ 'ssid': '1',
+ 'state': 'disabled'
+ }
+ REQ_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_alerts.NetAppESeriesAlerts.request'
+
+ def _set_args(self, **kwargs):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if kwargs is not None:
+ module_args.update(kwargs)
+ set_module_args(module_args)
+
+ def _validate_args(self, **kwargs):
+ self._set_args(**kwargs)
+ NetAppESeriesAlerts()
+
+ def test_validation_disable(self):
+ """Ensure a default configuration succeeds"""
+ self._validate_args()
+
+ def test_validation_enable(self):
+ """Ensure a typical, default configuration succeeds"""
+ self._validate_args(state='enabled', server='localhost', sender='x@y.z', recipients=['a@b.c'])
+
+ def test_validation_fail_required(self):
+ """Ensure we fail on missing configuration"""
+
+ # Missing recipients
+ with self.assertRaises(AnsibleFailJson):
+ self._validate_args(state='enabled', server='localhost', sender='x@y.z')
+ NetAppESeriesAlerts()
+
+ # Missing sender
+ with self.assertRaises(AnsibleFailJson):
+ self._validate_args(state='enabled', server='localhost', recipients=['a@b.c'])
+ NetAppESeriesAlerts()
+
+ # Missing server
+ with self.assertRaises(AnsibleFailJson):
+ self._validate_args(state='enabled', sender='x@y.z', recipients=['a@b.c'])
+
+ def test_validation_fail(self):
+ # Empty recipients
+ with self.assertRaises(AnsibleFailJson):
+ self._validate_args(state='enabled', server='localhost', sender='x@y.z', recipients=[])
+
+ # Bad sender
+ with self.assertRaises(AnsibleFailJson):
+ self._validate_args(state='enabled', server='localhost', sender='y.z', recipients=['a@b.c'])
+
+ def test_get_configuration(self):
+ """Validate retrieving the current configuration"""
+ self._set_args(state='enabled', server='localhost', sender='x@y.z', recipients=['a@b.c'])
+
+ expected = 'result'
+ alerts = NetAppESeriesAlerts()
+ alerts.is_proxy = lambda: False
+ alerts.is_embedded_available = lambda: False
+
+ # Expecting an update
+ with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
+ actual = alerts.get_configuration()
+ self.assertEquals(expected, actual)
+ self.assertEquals(req.call_count, 1)
+
+ def test_update_configuration(self):
+ """Validate updating the configuration"""
+ initial = dict(alertingEnabled=True,
+ emailServerAddress='localhost',
+ sendAdditionalContactInformation=True,
+ additionalContactInformation='None',
+ emailSenderAddress='x@y.z',
+ recipientEmailAddresses=['x@y.z']
+ )
+
+ args = dict(state='enabled', server=initial['emailServerAddress'], sender=initial['emailSenderAddress'],
+ contact=initial['additionalContactInformation'], recipients=initial['recipientEmailAddresses'])
+
+ self._set_args(**args)
+
+ alerts = NetAppESeriesAlerts()
+ alerts.is_proxy = lambda: False
+ alerts.is_embedded_available = lambda: False
+
+        # Ensure we trigger updates when each relevant field is changed
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)) as req:
+ with mock.patch.object(alerts, 'get_configuration', return_value=initial):
+ update = alerts.update_configuration()
+ self.assertFalse(update)
+
+ alerts.sender = 'a@b.c'
+ update = alerts.update_configuration()
+ self.assertTrue(update)
+ self._set_args(**args)
+
+ alerts.recipients = ['a@b.c']
+ update = alerts.update_configuration()
+ self.assertTrue(update)
+ self._set_args(**args)
+
+ alerts.contact = 'abc'
+ update = alerts.update_configuration()
+ self.assertTrue(update)
+ self._set_args(**args)
+
+ alerts.server = 'abc'
+ update = alerts.update_configuration()
+ self.assertTrue(update)
+
+ def test_send_test_email_check(self):
+ """Ensure we handle check_mode correctly"""
+ self._set_args(test=True)
+ alerts = NetAppESeriesAlerts()
+ alerts.check_mode = True
+ with mock.patch(self.REQ_FUNC) as req:
+ with mock.patch.object(alerts, 'update_configuration', return_value=True):
+ alerts.send_test_email()
+ self.assertFalse(req.called)
+
+ def test_send_test_email(self):
+ """Ensure we send a test email if test=True"""
+ self._set_args(test=True)
+ alerts = NetAppESeriesAlerts()
+ alerts.is_proxy = lambda: False
+ alerts.is_embedded_available = lambda: False
+
+ with mock.patch(self.REQ_FUNC, return_value=(200, dict(response='emailSentOK'))) as req:
+ alerts.send_test_email()
+ self.assertTrue(req.called)
+
+ def test_send_test_email_fail(self):
+ """Ensure we fail if the test returned a failure status"""
+ self._set_args(test=True)
+ alerts = NetAppESeriesAlerts()
+ alerts.is_proxy = lambda: False
+ alerts.is_embedded_available = lambda: False
+
+ ret_msg = 'fail'
+ with self.assertRaisesRegexp(AnsibleFailJson, ret_msg):
+ with mock.patch(self.REQ_FUNC, return_value=(200, dict(response=ret_msg))) as req:
+ alerts.send_test_email()
+ self.assertTrue(req.called)
+
+ def test_send_test_email_fail_connection(self):
+ """Ensure we fail cleanly if we hit a connection failure"""
+ self._set_args(test=True)
+ alerts = NetAppESeriesAlerts()
+ alerts.is_proxy = lambda: False
+ alerts.is_embedded_available = lambda: False
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"failed to send"):
+ with mock.patch(self.REQ_FUNC, side_effect=Exception) as req:
+ alerts.send_test_email()
+ self.assertTrue(req.called)
+
+ def test_update(self):
+ # Ensure that when test is enabled and alerting is enabled, we run the test
+ self._set_args(state='enabled', server='localhost', sender='x@y.z', recipients=['a@b.c'], test=True)
+ alerts = NetAppESeriesAlerts()
+ with self.assertRaisesRegexp(AnsibleExitJson, r"enabled"):
+ with mock.patch.object(alerts, 'update_configuration', return_value=True):
+ with mock.patch.object(alerts, 'send_test_email') as test:
+ alerts.update()
+ self.assertTrue(test.called)
+
+ # Ensure we don't run a test when changed=False
+ with self.assertRaisesRegexp(AnsibleExitJson, r"enabled"):
+ with mock.patch.object(alerts, 'update_configuration', return_value=False):
+ with mock.patch.object(alerts, 'send_test_email') as test:
+ alerts.update()
+ self.assertFalse(test.called)
+
+ # Ensure that test is not called when we have alerting disabled
+ self._set_args(state='disabled')
+ alerts = NetAppESeriesAlerts()
+ with self.assertRaisesRegexp(AnsibleExitJson, r"disabled"):
+ with mock.patch.object(alerts, 'update_configuration', return_value=True):
+ with mock.patch.object(alerts, 'send_test_email') as test:
+ alerts.update()
+ self.assertFalse(test.called)
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_alerts_syslog.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_alerts_syslog.py
new file mode 100644
index 000000000..758c7c21c
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_alerts_syslog.py
@@ -0,0 +1,151 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_alerts_syslog import NetAppESeriesAlertsSyslog
+from units.modules.utils import AnsibleFailJson, AnsibleExitJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class NetAppESeriesAlertSyslogTest(ModuleTestCase):
+ REQUIRED_PARAMS = {
+ "api_username": "rw",
+ "api_password": "password",
+ "api_url": "http://localhost",
+ }
+ REQ_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_alerts_syslog.NetAppESeriesAlertsSyslog.request'
+ BASE_REQ_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.module_utils.santricity.request'
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_valid_options_pass(self):
+ """Validate valid options."""
+ options_list = [{"servers": []},
+ {"servers": [{"address": "192.168.1.100"}]},
+ {"servers": [{"address": "192.168.1.100", "port": 1000}]},
+ {"servers": [{"address": "192.168.1.100"}, {"address": "192.168.1.200", "port": 1000}, {"address": "192.168.1.300", "port": 2000}]},
+ {"servers": [{"address": "192.168.1.101"}, {"address": "192.168.1.102"}, {"address": "192.168.1.103"},
+ {"address": "192.168.1.104"}, {"address": "192.168.1.105"}]}]
+
+ for options in options_list:
+ self._set_args(options)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesAlertsSyslog()
+ for options in options_list:
+ self._set_args(options)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": True})]):
+ syslog = NetAppESeriesAlertsSyslog()
+
+ def test_invalid_options_fail(self):
+ """Validate exceptions are thrown when invalid options are provided."""
+ options_list = [{"servers": [{"address": "192.168.1.100"}, {"address": "192.168.1.200"}, {"address": "192.168.1.300"},
+ {"address": "192.168.1.101"}, {"address": "192.168.1.102"}, {"address": "192.168.1.103"}]}]
+
+ for options in options_list:
+ self._set_args(options)
+ with self.assertRaisesRegexp(AnsibleFailJson, "Maximum number of syslog servers is 5!"):
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesAlertsSyslog()
+
+ def test_change_required_pass(self):
+ """Validate is_change_required properly reports true."""
+ options_list = [{"servers": []},
+ {"servers": [{"address": "192.168.1.100"}]},
+ {"servers": [{"address": "192.168.1.100", "port": 1000}]},
+ {"servers": [{"address": "192.168.1.100"}, {"address": "192.168.1.200", "port": 1000}, {"address": "192.168.1.300", "port": 2000}]},
+ {"servers": [{"address": "192.168.1.101"}, {"address": "192.168.1.102"}, {"address": "192.168.1.103"},
+ {"address": "192.168.1.104"}, {"address": "192.168.1.105"}]}]
+ current_config_list = [{"syslogReceivers": [{"serverName": "192.168.1.100", "portNumber": 514}]},
+ {"syslogReceivers": [{"serverName": "192.168.1.100", "portNumber": 1000}]},
+ {"syslogReceivers": [{"serverName": "192.168.1.101", "portNumber": 1000}]},
+ {"syslogReceivers": [{"serverName": "192.168.1.100", "portNumber": 514}]},
+ {"syslogReceivers": [{"serverName": "192.168.1.100", "portNumber": 514}]}]
+
+ for index in range(5):
+ self._set_args(options_list[index])
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesAlertsSyslog()
+ syslog.get_current_configuration = lambda: current_config_list[index]
+ self.assertTrue(syslog.is_change_required())
+
+ def test_get_current_configuration_fail(self):
+ """Verify get_current_configuration throws expected exception."""
+ self._set_args({"servers": []})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesAlertsSyslog()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve syslog configuration!"):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ syslog.get_current_configuration()
+
+ def test_no_change_required_pass(self):
+ """Validate is_change_required properly reports false."""
+ options_list = [{"servers": []},
+ {"servers": [{"address": "192.168.1.100"}]},
+ {"servers": [{"address": "192.168.1.101", "port": 1000}, {"address": "192.168.1.100", "port": 514}]}]
+ current_config_list = [{"syslogReceivers": []},
+ {"syslogReceivers": [{"serverName": "192.168.1.100", "portNumber": 514}]},
+ {"syslogReceivers": [{"serverName": "192.168.1.100", "portNumber": 514}, {"serverName": "192.168.1.101", "portNumber": 1000}]}]
+
+ for index in range(3):
+ self._set_args(options_list[index])
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesAlertsSyslog()
+ syslog.get_current_configuration = lambda: current_config_list[index]
+ self.assertFalse(syslog.is_change_required())
+
+ def test_request_body_pass(self):
+ """Verify request body is properly formatted."""
+ options_list = [{"servers": []},
+ {"servers": [{"address": "192.168.1.100"}]},
+ {"servers": [{"address": "192.168.1.101", "port": 1000}, {"address": "192.168.1.100", "port": 514}]}]
+ expected_config_list = [{"syslogReceivers": [], "defaultFacility": 3, "defaultTag": "StorageArray"},
+ {"syslogReceivers": [{"serverName": "192.168.1.100", "portNumber": 514}], "defaultFacility": 3, "defaultTag": "StorageArray"},
+ {"syslogReceivers": [{"serverName": "192.168.1.101", "portNumber": 1000}, {"serverName": "192.168.1.100", "portNumber": 514}],
+ "defaultFacility": 3, "defaultTag": "StorageArray"}]
+
+ for index in range(3):
+ self._set_args(options_list[index])
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesAlertsSyslog()
+ self.assertEqual(syslog.make_request_body(), expected_config_list[index])
+
+ def test_test_configuration_fail(self):
+ """Verify get_current_configuration throws expected exception."""
+ self._set_args({"servers": []})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesAlertsSyslog()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to send test message!"):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ syslog.test_configuration()
+
+ def test_update_pass(self):
+ """Verify update method successfully completes."""
+ self._set_args({"test": True, "servers": [{"address": "192.168.1.100"}]})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesAlertsSyslog()
+ syslog.is_change_required = lambda: True
+ syslog.make_request_body = lambda: {}
+        syslog.test_configuration = lambda: None
+
+ with self.assertRaises(AnsibleExitJson):
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ syslog.update()
+
+    def test_update_fail(self):
+ """Verify update method throws expected exceptions."""
+ self._set_args({"servers": []})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesAlertsSyslog()
+ syslog.is_change_required = lambda: True
+ syslog.make_request_body = lambda: {}
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to add syslog server!"):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ syslog.update()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_asup.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_asup.py
new file mode 100644
index 000000000..84c05d59e
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_asup.py
@@ -0,0 +1,318 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import time
+from units.compat import mock
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_asup import NetAppESeriesAsup
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+
+
+class AsupTest(ModuleTestCase):
+ REQUIRED_PARAMS = {
+ "api_username": "rw",
+ "api_password": "password",
+ "api_url": "http://localhost",
+ "ssid": "1",
+ }
+
+ REQ_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_asup.NetAppESeriesAsup.request"
+ BASE_REQ_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.module_utils.santricity.request'
+ TIME_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_asup.time.time"
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_valid_options_pass(self):
+ """Validate valid options."""
+ options_list = [
+ {"state": "disabled", "active": False},
+ {"state": "enabled", "active": False, "start": 20, "end": 24, "days": ["saturday", "sunday"],
+ "method": "email", "email": {"server": "192.168.1.100", "sender": "noreply@netapp.com"}},
+ {"state": "enabled", "active": False, "start": 20, "end": 24, "days": ["saturday", "sunday"],
+ "method": "https", "routing_type": "direct"},
+ {"state": "enabled", "active": False, "start": 20, "end": 24, "days": ["saturday", "sunday"],
+ "method": "https", "routing_type": "proxy", "proxy": {"host": "192.168.1.100", "port": 1234}},
+ {"state": "enabled", "active": False, "start": 20, "end": 24, "days": ["saturday", "sunday"],
+ "method": "https", "routing_type": "script", "proxy": {"script": "/path/to/proxy/script.sh"}},
+ {"state": "maintenance_enabled", "maintenance_duration": 24, "maintenance_emails": ["janey@netapp.com", "joe@netapp.com"]},
+ {"state": "maintenance_disabled"}
+ ]
+
+ for options in options_list:
+ self._set_args(options)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ for options in options_list:
+ self._set_args(options)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": True})]):
+ asup = NetAppESeriesAsup()
+
+ def test_invalid_options_fail(self):
+ """Verify invalid options throw expected exceptions."""
+ options_list = [
+ {"state": "enabled", "active": False, "start": 24, "end": 23, "days": ["saturday", "sunday"],
+ "method": "email", "email": {"server": "192.168.1.100", "sender": "noreply@netapp.com"}},
+ {"state": "enabled", "active": False, "start": -1, "end": 23, "days": ["saturday", "sunday"],
+ "method": "email", "email": {"server": "192.168.1.100", "sender": "noreply@netapp.com"}},
+ {"state": "enabled", "active": False, "start": 20, "end": 25, "days": ["saturday", "sunday"],
+ "method": "email", "email": {"server": "192.168.1.100", "sender": "noreply@netapp.com"}},
+ {"state": "enabled", "active": False, "start": 20, "end": 24, "days": ["not_a_day", "sunday"],
+ "method": "https", "routing_type": "direct"},
+ {"state": "maintenance_enabled", "maintenance_duration": 0, "maintenance_emails": ["janey@netapp.com", "joe@netapp.com"]},
+ {"state": "maintenance_enabled", "maintenance_duration": 73, "maintenance_emails": ["janey@netapp.com", "joe@netapp.com"]},
+ ]
+
+ for options in options_list:
+ self._set_args(options)
+ with self.assertRaises(AnsibleFailJson):
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+
+ def test_get_configuration_fail(self):
+ """Verify get_configuration method throws expected exceptions."""
+ self._set_args({"state": "disabled", "active": False})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve ASUP configuration!"):
+ asup.get_configuration()
+ self._set_args({"state": "disabled", "active": False})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"asupCapable": False, "onDemandCapable": True})):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve ASUP configuration!"):
+ asup.get_configuration()
+ self._set_args({"state": "disabled", "active": False})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"asupCapable": True, "onDemandCapable": False})):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve ASUP configuration!"):
+ asup.get_configuration()
+ self._set_args({"state": "disabled", "active": False})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"asupCapable": False, "onDemandCapable": False})):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve ASUP configuration!"):
+ asup.get_configuration()
+
+ def test_in_maintenance_mode_pass(self):
+ """Verify in_maintenance_mode returns the expected values."""
+ self._set_args({"state": "disabled", "active": False})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
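+ # Maintenance state is persisted as key-value entries; a stop time in the future means the window is still active.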
+ with mock.patch(self.REQ_FUNC, return_value=(200, [{"key": "ansible_asup_maintenance_stop_time", "value": str(time.time() + 10000)}])):
+ self.assertTrue(asup.in_maintenance_mode())
+
+ self._set_args({"state": "disabled", "active": False})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ with mock.patch(self.REQ_FUNC, return_value=(200, [{"key": "ansible_asup_maintenance_email_list", "value": "janey@netapp.com,joe@netapp.com"},
+ {"key": "ansible_asup_maintenance_stop_time", "value": str(time.time() - 1)}])):
+ self.assertFalse(asup.in_maintenance_mode())
+
+ def test_in_maintenance_mode_fail(self):
+ """Verify that in_maintenance_mode throws expected exceptions."""
+ self._set_args({"state": "disabled", "active": False})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve maintenance windows information!"):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ asup.in_maintenance_mode()
+
+ def test_update_configuration_pass(self):
+ """Verify that update_configuration completes successfully."""
+ asup_config = [{"asupCapable": True,
+ "onDemandCapable": True,
+ "asupEnabled": True,
+ "onDemandEnabled": True,
+ "remoteDiagsEnabled": True,
+ "delivery": {"method": "smtp",
+ "routingType": "none",
+ "proxyHost": None,
+ "proxyPort": 0,
+ "proxyUserName": None,
+ "proxyPassword": None,
+ "proxyScript": None,
+ "mailRelayServer": "server@example.com",
+ "mailSenderAddress": "noreply@example.com"},
+ "destinationAddress": "autosupport@netapp.com",
+ "schedule": {"dailyMinTime": 0,
+ "dailyMaxTime": 1439,
+ "weeklyMinTime": 0,
+ "weeklyMaxTime": 1439,
+ "daysOfWeek": ["sunday", "monday", "tuesday"]}},
+ {"asupCapable": True,
+ "onDemandCapable": True,
+ "asupEnabled": True,
+ "onDemandEnabled": False,
+ "remoteDiagsEnabled": False,
+ "delivery": {
+ "method": "https",
+ "routingType": "proxyServer",
+ "proxyHost": "192.168.1.100",
+ "proxyPort": 1234,
+ "proxyUserName": None,
+ "proxyPassword": None,
+ "proxyScript": None,
+ "mailRelayServer": None,
+ "mailSenderAddress": None
+ },
+ "destinationAddress": "https://support.netapp.com/put/AsupPut/",
+ "schedule": {
+ "dailyMinTime": 1200,
+ "dailyMaxTime": 1439,
+ "weeklyMinTime": 0,
+ "weeklyMaxTime": 1439,
+ "daysOfWeek": ["sunday", "saturday"]}},
+ {"asupCapable": True,
+ "onDemandCapable": True,
+ "asupEnabled": True,
+ "onDemandEnabled": False,
+ "remoteDiagsEnabled": False,
+ "delivery": {
+ "method": "https",
+ "routingType": "proxyScript",
+ "proxyHost": None,
+ "proxyPort": 0,
+ "proxyUserName": None,
+ "proxyPassword": None,
+ "proxyScript": "/home/user/path/to/script.sh",
+ "mailRelayServer": None,
+ "mailSenderAddress": None
+ },
+ "destinationAddress": "https://support.netapp.com/put/AsupPut/",
+ "schedule": {
+ "dailyMinTime": 0,
+ "dailyMaxTime": 420,
+ "weeklyMinTime": 0,
+ "weeklyMaxTime": 1439,
+ "daysOfWeek": ["monday", "tuesday", "wednesday", "thursday", "friday"]}}]
+ options_list = [{"state": "disabled", "active": False},
+ {"state": "enabled", "active": False, "start": 20, "end": 24, "days": ["saturday"],
+ "method": "email", "email": {"server": "192.168.1.100", "sender": "noreply@netapp.com"}},
+ {"state": "enabled", "active": False, "start": 20, "end": 24, "days": ["sunday"],
+ "method": "https", "routing_type": "direct"},
+ {"state": "enabled", "active": False, "start": 20, "end": 24, "days": ["saturday", "sunday"],
+ "method": "https", "routing_type": "proxy", "proxy": {"host": "192.168.1.100", "port": 1234}},
+ {"state": "enabled", "active": False, "start": 20, "end": 24, "days": ["saturday", "sunday"],
+ "method": "https", "routing_type": "script", "proxy": {"script": "/path/to/proxy/script.sh"}},
+ {"state": "maintenance_enabled", "maintenance_duration": 24, "maintenance_emails": ["janey@netapp.com", "joe@netapp.com"]},
+ {"state": "maintenance_disabled"}]
+
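+ # Each option set is evaluated against one of the three sample configurations above (cycled via index % 3).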
+ for index, options in enumerate(options_list):
+ self._set_args(options)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ asup.get_configuration = lambda: asup_config[index % 3]
+ asup.in_maintenance_mode = lambda: False
+
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ asup.update_configuration()
+
+ def test_update_configuration_fail(self):
+ """Verify that update_configuration throws expected exceptions."""
+ asup_config = {"asupCapable": True,
+ "onDemandCapable": True,
+ "asupEnabled": True,
+ "onDemandEnabled": True,
+ "remoteDiagsEnabled": True,
+ "delivery": {"method": "smtp",
+ "routingType": "none",
+ "proxyHost": None,
+ "proxyPort": 0,
+ "proxyUserName": None,
+ "proxyPassword": None,
+ "proxyScript": None,
+ "mailRelayServer": "server@example.com",
+ "mailSenderAddress": "noreply@example.com"},
+ "destinationAddress": "autosupport@netapp.com",
+ "schedule": {"dailyMinTime": 0,
+ "dailyMaxTime": 1439,
+ "weeklyMinTime": 0,
+ "weeklyMaxTime": 1439,
+ "daysOfWeek": ["sunday", "monday", "tuesday"]}}
+
+ # Exceptions for state=="enabled" or state=="disabled"
+ self._set_args({"state": "enabled", "active": False, "start": 20, "end": 24, "days": ["saturday"],
+ "method": "email", "email": {"server": "192.168.1.100", "sender": "noreply@netapp.com"}})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ asup.get_configuration = lambda: asup_config
+ asup.in_maintenance_mode = lambda: False
+ asup.validate = lambda: True
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to validate ASUP configuration!"):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ asup.update_configuration()
+ self._set_args({"state": "disabled", "active": False})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ asup.get_configuration = lambda: asup_config
+ asup.in_maintenance_mode = lambda: False
+ asup.validate = lambda: False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to change ASUP configuration!"):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ asup.update_configuration()
+
+ # Exceptions for state=="maintenance_enabled"
+ self._set_args({"state": "maintenance_enabled", "maintenance_duration": 24, "maintenance_emails": ["janey@netapp.com", "joe@netapp.com"]})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ asup.get_configuration = lambda: {"asupEnabled": False}
+ asup.in_maintenance_mode = lambda: False
+ with self.assertRaisesRegexp(AnsibleFailJson, "AutoSupport must be enabled before enabling or disabling maintenance mode."):
+ asup.update_configuration()
+ self._set_args({"state": "maintenance_enabled", "maintenance_duration": 24, "maintenance_emails": ["janey@netapp.com", "joe@netapp.com"]})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ asup.get_configuration = lambda: {"asupEnabled": True}
+ asup.in_maintenance_mode = lambda: False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to enabled ASUP maintenance window."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ asup.update_configuration()
+ self._set_args({"state": "maintenance_enabled", "maintenance_duration": 24, "maintenance_emails": ["janey@netapp.com", "joe@netapp.com"]})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ asup.get_configuration = lambda: {"asupEnabled": True}
+ asup.in_maintenance_mode = lambda: False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to store maintenance information."):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, None), Exception()]):
+ asup.update_configuration()
+ self._set_args({"state": "maintenance_enabled", "maintenance_duration": 24, "maintenance_emails": ["janey@netapp.com", "joe@netapp.com"]})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ asup.get_configuration = lambda: {"asupEnabled": True}
+ asup.in_maintenance_mode = lambda: False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to store maintenance information."):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, None), (200, None), Exception()]):
+ asup.update_configuration()
+
+ # Exceptions for state=="maintenance_disabled"
+ self._set_args({"state": "maintenance_disabled"})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ asup.get_configuration = lambda: {"asupEnabled": True}
+ asup.in_maintenance_mode = lambda: True
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to disable ASUP maintenance window."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ asup.update_configuration()
+ self._set_args({"state": "maintenance_disabled"})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ asup.get_configuration = lambda: {"asupEnabled": True}
+ asup.in_maintenance_mode = lambda: True
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to store maintenance information."):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, None), Exception()]):
+ asup.update_configuration()
+ self._set_args({"state": "maintenance_disabled"})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ asup = NetAppESeriesAsup()
+ asup.get_configuration = lambda: {"asupEnabled": True}
+ asup.in_maintenance_mode = lambda: True
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to store maintenance information."):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, None), (200, None), Exception()]):
+ asup.update_configuration()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_auditlog.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_auditlog.py
new file mode 100644
index 000000000..1cb57068a
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_auditlog.py
@@ -0,0 +1,205 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_auditlog import NetAppESeriesAuditLog
+from units.modules.utils import AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class NetAppESeriesAuditLogTests(ModuleTestCase):
+ REQUIRED_PARAMS = {'api_username': 'rw',
+ 'api_password': 'password',
+ 'api_url': 'http://localhost',
+ 'ssid': '1'}
+ REQ_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_auditlog.NetAppESeriesAuditLog.request'
+ BASE_REQ_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.module_utils.santricity.request'
+ MAX_RECORDS_MAXIMUM = 50000
+ MAX_RECORDS_MINIMUM = 100
+
+ def _set_args(self, **kwargs):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if kwargs is not None:
+ module_args.update(kwargs)
+ set_module_args(module_args)
+
+ def test_max_records_argument_pass(self):
+ """Verify NetAppESeriesAuditLog accepts max_records values at and within the upper and lower boundaries."""
+ initial = {"max_records": 1000,
+ "log_level": "writeOnly",
+ "full_policy": "overWrite",
+ "threshold": 90}
+ max_records_set = (self.MAX_RECORDS_MINIMUM, 25000, self.MAX_RECORDS_MAXIMUM)
+
+ for max_records in max_records_set:
+ initial["max_records"] = max_records
+ self._set_args(**initial)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ audit_log = NetAppESeriesAuditLog()
+ self.assertTrue(audit_log.max_records == max_records)
+
+ def test_max_records_argument_fail(self):
+ """Verify NetAppESeriesAuditLog rejects max_records values outside the upper and lower boundaries."""
+ initial = {"max_records": 1000,
+ "log_level": "writeOnly",
+ "full_policy": "overWrite",
+ "threshold": 90}
+ max_records_set = (self.MAX_RECORDS_MINIMUM - 1, self.MAX_RECORDS_MAXIMUM + 1)
+
+ for max_records in max_records_set:
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Audit-log max_records count must be between 100 and 50000"):
+ initial["max_records"] = max_records
+ self._set_args(**initial)
+ NetAppESeriesAuditLog()
+
+ def test_threshold_argument_pass(self):
+ """Verify NetAppESeriesAuditLog accepts threshold values at and within the upper and lower boundaries."""
+ initial = {"max_records": 1000,
+ "log_level": "writeOnly",
+ "full_policy": "overWrite",
+ "threshold": 90}
+ threshold_set = (60, 75, 90)
+
+ for threshold in threshold_set:
+ initial["threshold"] = threshold
+ self._set_args(**initial)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ audit_log = NetAppESeriesAuditLog()
+ self.assertTrue(audit_log.threshold == threshold)
+
+ def test_threshold_argument_fail(self):
+ """Verify NetAppESeriesAuditLog rejects threshold values outside the upper and lower boundaries."""
+ initial = {"max_records": 1000,
+ "log_level": "writeOnly",
+ "full_policy": "overWrite",
+ "threshold": 90}
+ threshold_set = (59, 91)
+
+ for threshold in threshold_set:
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Audit-log percent threshold must be between 60 and 90"):
+ initial["threshold"] = threshold
+ self._set_args(**initial)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ NetAppESeriesAuditLog()
+
+ def test_get_configuration_pass(self):
+ """Validate get_configuration does not throw an exception when a normal request is returned."""
+ initial = {"max_records": 1000,
+ "log_level": "writeOnly",
+ "full_policy": "overWrite",
+ "threshold": 90}
+ expected = {"auditLogMaxRecords": 1000,
+ "auditLogLevel": "writeOnly",
+ "auditLogFullPolicy": "overWrite",
+ "auditLogWarningThresholdPct": 90}
+
+ self._set_args(**initial)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ audit_log = NetAppESeriesAuditLog()
+
+ with mock.patch(self.REQ_FUNC, return_value=(200, expected)):
+ body = audit_log.get_configuration()
+ self.assertTrue(body == expected)
+
+ def test_get_configuration_fail(self):
+ """Verify AnsibleFailJson exception is thrown."""
+ initial = {"max_records": 1000,
+ "log_level": "writeOnly",
+ "full_policy": "overWrite",
+ "threshold": 90}
+
+ self._set_args(**initial)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ audit_log = NetAppESeriesAuditLog()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve the audit-log configuration!"):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ audit_log.get_configuration()
+
+ def test_build_configuration_pass(self):
+ """Validate configuration changes will force an update."""
+ response = {"auditLogMaxRecords": 1000,
+ "auditLogLevel": "writeOnly",
+ "auditLogFullPolicy": "overWrite",
+ "auditLogWarningThresholdPct": 90}
+ initial = {"max_records": 1000,
+ "log_level": "writeOnly",
+ "full_policy": "overWrite",
+ "threshold": 90}
+ changes = [{"max_records": 50000},
+ {"log_level": "all"},
+ {"full_policy": "preventSystemAccess"},
+ {"threshold": 75}]
+
+ for change in changes:
+ initial_with_changes = initial.copy()
+ initial_with_changes.update(change)
+ self._set_args(**initial_with_changes)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ audit_log = NetAppESeriesAuditLog()
+
+ with mock.patch(self.REQ_FUNC, return_value=(200, response)):
+ update = audit_log.build_configuration()
+ self.assertTrue(update)
+
+ def test_delete_log_messages_fail(self):
+ """Verify AnsibleFailJson exception is thrown."""
+ initial = {"max_records": 1000,
+ "log_level": "writeOnly",
+ "full_policy": "overWrite",
+ "threshold": 90}
+
+ self._set_args(**initial)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ audit_log = NetAppESeriesAuditLog()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to delete audit-log messages!"):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ audit_log.delete_log_messages()
+
+ def test_update_configuration_delete_pass(self):
+ """Verify a 422 response with force=True still returns True."""
+ body = {"auditLogMaxRecords": 1000,
+ "auditLogLevel": "writeOnly",
+ "auditLogFullPolicy": "overWrite",
+ "auditLogWarningThresholdPct": 90}
+ initial = {"max_records": 2000,
+ "log_level": "writeOnly",
+ "full_policy": "overWrite",
+ "threshold": 90,
+ "force": True}
+
+ self._set_args(**initial)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ audit_log = NetAppESeriesAuditLog()
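+ # Sequence: fetch configuration, update rejected with 422 (audit log full), forced message delete, retried update succeeds.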
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, body),
+ (422, {u"invalidFieldsIfKnown": None,
+ u"errorMessage": u"Configuration change...",
+ u"localizedMessage": u"Configuration change...",
+ u"retcode": u"auditLogImmediateFullCondition",
+ u"codeType": u"devicemgrerror"}),
+ (200, None),
+ (200, None)]):
+ self.assertTrue(audit_log.update_configuration())
+
+ def test_update_configuration_delete_skip_fail(self):
+ """Verify a 422 response without force results in an AnsibleFailJson exception."""
+ body = {"auditLogMaxRecords": 1000,
+ "auditLogLevel": "writeOnly",
+ "auditLogFullPolicy": "overWrite",
+ "auditLogWarningThresholdPct": 90}
+ initial = {"max_records": 2000,
+ "log_level": "writeOnly",
+ "full_policy": "overWrite",
+ "threshold": 90,
+ "force": False}
+
+ self._set_args(**initial)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ audit_log = NetAppESeriesAuditLog()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to update audit-log configuration!"):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, body), Exception(422, {"errorMessage": "error"}),
+ (200, None), (200, None)]):
+ audit_log.update_configuration()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_auth.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_auth.py
new file mode 100644
index 000000000..305d6028c
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_auth.py
@@ -0,0 +1,488 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_auth import NetAppESeriesAuth
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class AuthTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"api_username": "admin", "api_password": "password", "api_url": "http://localhost", "ssid": "1"}
+ REQ_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_auth.NetAppESeriesAuth.request"
+ SLEEP_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_auth.sleep"
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_minimum_password_length_change_required_pass(self):
+ """Verify minimum_password_length_change_required returns expected values."""
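+ # ssid "Proxy" targets the Web Services Proxy itself, "10" a proxy-managed array, and "1" an embedded system.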
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"adminPasswordSet": False, "minimumPasswordLength": 8})):
+ self.assertFalse(auth.minimum_password_length_change_required())
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass", "minimum_password_length": 7})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"adminPasswordSet": False, "minimumPasswordLength": 8})):
+ self.assertTrue(auth.minimum_password_length_change_required())
+
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"adminPasswordSet": False, "minimumPasswordLength": 8})):
+ self.assertFalse(auth.minimum_password_length_change_required())
+
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: True
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"adminPasswordSet": False, "minimumPasswordLength": 8})):
+ self.assertFalse(auth.minimum_password_length_change_required())
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass", "minimum_password_length": 7})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: True
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"adminPasswordSet": False, "minimumPasswordLength": 8})):
+ self.assertTrue(auth.minimum_password_length_change_required())
+
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ auth.is_embedded_available = lambda: True
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"adminPasswordSet": False, "minimumPasswordLength": 8})):
+ self.assertFalse(auth.minimum_password_length_change_required())
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass", "minimum_password_length": 7})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ auth.is_embedded_available = lambda: True
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"adminPasswordSet": False, "minimumPasswordLength": 8})):
+ self.assertTrue(auth.minimum_password_length_change_required())
+
+ def test_minimum_password_length_change_required_fail(self):
+ """Verify minimum_password_length_change_required throws expected exceptions."""
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass", "minimum_password_length": 10})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Password does not meet the length requirement"):
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"adminPasswordSet": False, "minimumPasswordLength": 8})):
+ auth.minimum_password_length_change_required()
+
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Password does not meet the length requirement"):
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"adminPasswordSet": True, "minimumPasswordLength": 10})):
+ auth.minimum_password_length_change_required()
+
+ def test_update_minimum_password_length_pass(self):
+ """Verify update_minimum_password_length returns expected values."""
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ auth.is_admin_password_set = True
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ auth.update_minimum_password_length()
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ auth.is_admin_password_set = False
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ auth.update_minimum_password_length()
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ auth.is_admin_password_set = False
+ with mock.patch(self.REQ_FUNC, side_effect=[Exception(), (200, None)]):
+ auth.update_minimum_password_length()
+
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: True
+ auth.is_admin_password_set = True
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ auth.update_minimum_password_length()
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: True
+ auth.is_admin_password_set = False
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ auth.update_minimum_password_length()
+
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ auth.is_embedded_available = lambda: True
+ auth.is_admin_password_set = True
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ auth.update_minimum_password_length()
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ auth.is_embedded_available = lambda: True
+ auth.is_admin_password_set = False
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ auth.update_minimum_password_length()
+
+ def test_update_minimum_password_length_fail(self):
+ """Verify update_minimum_password_length throws expected exceptions."""
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ auth.is_admin_password_set = False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to set minimum password length."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ auth.update_minimum_password_length()
+
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: True
+ auth.is_admin_password_set = False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to set minimum password length."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ auth.update_minimum_password_length()
+
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ auth.is_embedded_available = lambda: True
+ auth.is_admin_password_set = False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to set minimum password length."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ auth.update_minimum_password_length()
+
+ def test_logout_system_pass(self):
+ """Verify logout_system completes successfully."""
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ with mock.patch(self.REQ_FUNC, return_value=(204, None)):
+ auth.logout_system()
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ with mock.patch(self.REQ_FUNC, return_value=(204, None)):
+ auth.logout_system()
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: True
+ with mock.patch(self.REQ_FUNC, return_value=(204, None)):
+ auth.logout_system()
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass", "minimum_password_length": 8})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ auth.is_embedded_available = lambda: True
+ with mock.patch(self.REQ_FUNC, return_value=(204, None)):
+ auth.logout_system()
+
+ def test_password_change_required_pass(self):
+ """Verify password_change_required returns expected values."""
+ self._set_args({"ssid": "Proxy", "user": "admin"})
+ auth = NetAppESeriesAuth()
+ self.assertFalse(auth.password_change_required())
+
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": False})]):
+ self.assertTrue(auth.password_change_required())
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: True
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": False})]):
+ self.assertTrue(auth.password_change_required())
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": False})]):
+ self.assertTrue(auth.password_change_required())
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ auth.is_embedded_available = lambda: True
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": False})]):
+ self.assertTrue(auth.password_change_required())
+
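+ # Once the admin password is set, a second request validates the stored password; a 401 or
+ # isValidPassword=False response indicates a change is required.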
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ auth.logout_system = lambda: None
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": True}), (200, None)]):
+ self.assertFalse(auth.password_change_required())
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": True}), (401, None)]):
+ self.assertTrue(auth.password_change_required())
+
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: True
+ auth.logout_system = lambda: None
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": True}), (200, None)]):
+ self.assertFalse(auth.password_change_required())
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": True}), (401, None)]):
+ self.assertTrue(auth.password_change_required())
+
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ auth.logout_system = lambda: None
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": True}), (200, {"isValidPassword": True})]):
+ self.assertFalse(auth.password_change_required())
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": True}), (200, {"isValidPassword": False})]):
+ self.assertTrue(auth.password_change_required())
+
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ auth.is_embedded_available = lambda: True
+ auth.logout_system = lambda: None
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": True}), (200, None)]):
+ self.assertFalse(auth.password_change_required())
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": True}), (401, None)]):
+ self.assertTrue(auth.password_change_required())
+
+ def test_password_change_required_fail(self):
+ """Verify password_change_required throws expected exceptions."""
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ auth.logout_system = lambda: None
+ with self.assertRaisesRegexp(AnsibleFailJson, "SAML enabled! SAML disables default role based login."):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": True}), (422, None)]):
+ auth.password_change_required()
+
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ auth.logout_system = lambda: None
+ auth.is_web_services_version_met = lambda x: True
+ with self.assertRaisesRegexp(AnsibleFailJson, "For platforms before E2800 use SANtricity Web Services Proxy 4.1 or later!"):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": True}), (404, None)]):
+ self.assertFalse(auth.password_change_required())
+ auth.is_web_services_version_met = lambda x: False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to validate stored password!"):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": True}), (404, None)]):
+ self.assertFalse(auth.password_change_required())
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to validate stored password!"):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": True}), (422, None)]):
+ self.assertFalse(auth.password_change_required())
+
+ self._set_args({"ssid": "10", "user": "monitor", "password": "monitorpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ auth.logout_system = lambda: None
+ auth.is_web_services_version_met = lambda x: True
+ with self.assertRaisesRegexp(AnsibleFailJson, "Role based login not available! Only storage system password can be set for storage systems prior to"):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, {"minimumPasswordLength": 8, "adminPasswordSet": True})]):
+ self.assertFalse(auth.password_change_required())
+
+ def test_set_array_admin_password_pass(self):
+ """Verify set_array_admin_password completes successfully."""
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, None)]):
+ auth.set_array_admin_password()
+ with mock.patch(self.REQ_FUNC, side_effect=[Exception(), (200, None)]):
+ auth.set_array_admin_password()
+
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, None)]):
+ auth.set_array_admin_password()
+ auth.is_embedded_available = lambda: True
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, None)]):
+ auth.set_array_admin_password()
+
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ auth.is_embedded_available = lambda: True
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ auth.set_array_admin_password()
+
+ def test_set_array_admin_password_fail(self):
+ """Verify set_array_admin_password throws expected exceptions."""
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to set proxy's admin password."):
+ with mock.patch(self.REQ_FUNC, side_effect=[Exception(), Exception()]):
+ auth.set_array_admin_password()
+
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to set storage system's admin password."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ auth.set_array_admin_password()
+
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ auth.is_embedded_available = lambda: True
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to set embedded storage system's admin password."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ auth.set_array_admin_password()
+
+ def test_set_array_password_pass(self):
+ """Verify set_array_password completes successfully."""
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ auth.is_admin_password_set = True
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ auth.set_array_password()
+
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: True
+ auth.is_admin_password_set = True
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ auth.set_array_password()
+
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ auth.is_embedded_available = lambda: True
+ auth.is_admin_password_set = True
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ auth.set_array_password()
+
+ def test_set_array_password_fail(self):
+ """Verify set_array_password throws expected exceptions."""
+ self._set_args({"ssid": "Proxy", "user": "monitor", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ auth.is_admin_password_set = False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Admin password not set! Set admin password before changing non-admin user passwords."):
+ auth.set_array_password()
+
+ self._set_args({"ssid": "Proxy", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: False
+ auth.is_admin_password_set = True
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to set proxy password."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ auth.set_array_password()
+
+ self._set_args({"ssid": "10", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: True
+ auth.is_embedded_available = lambda: True
+ auth.is_admin_password_set = True
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to set embedded user password."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ auth.set_array_password()
+
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_proxy = lambda: False
+ auth.is_embedded_available = lambda: True
+ auth.is_admin_password_set = True
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to set embedded user password."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ auth.set_array_password()
+
+ def test_apply_pass(self):
+ """Verify apply reports the expected changes."""
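+ # Each scenario stubs the decision helpers so apply() only has to report which changes were made.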
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_admin_password_set = True
+ auth.password_change_required = lambda: True
+ auth.minimum_password_length_change_required = lambda: True
+ auth.update_minimum_password_length = lambda: None
+ auth.set_array_admin_password = lambda: None
+ auth.set_array_password = lambda: None
+ with self.assertRaisesRegexp(AnsibleExitJson, "'admin' password and required password length has been changed."):
+ auth.apply()
+
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_admin_password_set = False
+ auth.password_change_required = lambda: True
+ auth.minimum_password_length_change_required = lambda: True
+ auth.update_minimum_password_length = lambda: None
+ auth.set_array_admin_password = lambda: None
+ auth.set_array_password = lambda: None
+ with self.assertRaisesRegexp(AnsibleExitJson, "'admin' password and required password length has been changed."):
+ auth.apply()
+
+ self._set_args({"ssid": "1", "user": "monitor", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_admin_password_set = True
+ auth.password_change_required = lambda: True
+ auth.minimum_password_length_change_required = lambda: True
+ auth.update_minimum_password_length = lambda: None
+ auth.set_array_admin_password = lambda: None
+ auth.set_array_password = lambda: None
+ with self.assertRaisesRegexp(AnsibleExitJson, "'monitor' password and required password length has been changed."):
+ auth.apply()
+
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_admin_password_set = True
+ auth.password_change_required = lambda: True
+ auth.minimum_password_length_change_required = lambda: False
+ auth.update_minimum_password_length = lambda: None
+ auth.set_array_admin_password = lambda: None
+ auth.set_array_password = lambda: None
+ with self.assertRaisesRegexp(AnsibleExitJson, "'admin' password has been changed."):
+ auth.apply()
+
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_admin_password_set = True
+ auth.password_change_required = lambda: False
+ auth.minimum_password_length_change_required = lambda: True
+ auth.update_minimum_password_length = lambda: None
+ auth.set_array_admin_password = lambda: None
+ auth.set_array_password = lambda: None
+ with self.assertRaisesRegexp(AnsibleExitJson, "Required password length has been changed."):
+ auth.apply()
+
+ self._set_args({"ssid": "1", "user": "admin", "password": "adminpass"})
+ auth = NetAppESeriesAuth()
+ auth.is_admin_password_set = True
+ auth.password_change_required = lambda: False
+ auth.minimum_password_length_change_required = lambda: False
+ auth.update_minimum_password_length = lambda: None
+ auth.set_array_admin_password = lambda: None
+ auth.set_array_password = lambda: None
+ with self.assertRaisesRegexp(AnsibleExitJson, "No changes have been made."):
+ auth.apply()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_client_certificate.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_client_certificate.py
new file mode 100644
index 000000000..9541aeb8a
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_client_certificate.py
@@ -0,0 +1,373 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import datetime
+import os
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_client_certificate import NetAppESeriesClientCertificate
+from units.modules.utils import AnsibleFailJson, AnsibleExitJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class NetAppESeriesClientCertificateTest(ModuleTestCase):
+
+ REQUIRED_PARAMS = {"api_username": "username",
+ "api_password": "password",
+ "api_url": "https://localhost:8443/devmgr/v2",
+ "ssid": "1", "validate_certs": "no"}
+
+ REQUEST_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_client_certificate.NetAppESeriesClientCertificate.request"
+ LOAD_PEM_X509_CERTIFICATE = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_client_certificate.x509.load_pem_x509_certificate"
+ LOAD_DER_X509_CERTIFICATE = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_client_certificate.x509.load_der_x509_certificate"
+ BASE_REQUEST_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.module_utils.santricity.request"
+
+ CERTIFICATE_PATH = "certificate.crt"
+ CERTIFICATE_CONTENT = """Certificate:
+ Data:
+ Version: 3 (0x2)
+ Serial Number: 1 (0x1)
+ Signature Algorithm: sha256WithRSAEncryption
+ Issuer: C=AU, ST=Florida, L=Palm City, O=Internet Widgits Pty Ltd
+ Validity
+ Not Before: Apr 1 19:30:07 2019 GMT
+ Not After : Mar 29 19:30:07 2029 GMT
+ Subject: C=AU, ST=Florida, O=Internet Widgits Pty Ltd, CN=test.example.com
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ Public-Key: (2048 bit)
+ Modulus:
+ 00:ad:64:b5:4c:40:bb:0f:03:e8:2d:a3:76:af:14:
+ 49:b8:06:4a:f9:48:9b:ad:f2:69:55:42:b0:49:de:
+ cd:10:c3:37:71:1a:f8:e1:5e:88:61:b3:c3:0f:7a:
+ 3b:3e:eb:47:d3:7b:02:f9:40:6d:11:e9:c6:d0:05:
+ 3c:ab:d2:51:97:a3:c9:5d:e4:31:89:85:28:dd:96:
+ 75:c7:18:87:0e:a4:26:cb:bc:6d:2f:47:74:89:10:
+ a0:40:5c:39:4e:c2:52:bc:72:25:6c:30:48:dc:50:
+ 4e:c7:10:68:7f:96:ef:14:78:05:b3:53:5a:91:2a:
+ 8f:b0:5d:75:f0:85:b7:34:6f:78:43:44:a6:3c:4d:
+ 87:56:d0:fb:cf:53:de:50:f8:a7:70:89:68:52:83:
+ 87:32:70:da:cc:3f:d5:ae:f8:b4:8f:d9:de:40:b7:
+ 9a:15:c3:83:4b:62:73:d3:a9:e6:fe:2e:4a:33:7f:
+ 13:76:10:d5:d4:04:18:44:9c:b7:a8:17:3f:fe:4b:
+ 5d:d4:92:5e:9f:95:64:77:ef:1c:01:09:6a:a3:29:
+ 33:08:10:fa:5b:1c:ab:45:16:9d:ee:93:0b:90:d4:
+ ea:cf:0e:13:c8:73:d2:29:00:fa:c1:10:ed:20:66:
+ 4f:f5:a5:cf:8d:4e:2a:8e:4a:f2:8e:59:f1:a5:b6:
+ f5:87
+ Exponent: 65537 (0x10001)
+ X509v3 extensions:
+ X509v3 Basic Constraints:
+ CA:FALSE
+ Netscape Comment:
+ OpenSSL Generated Certificate
+ X509v3 Subject Key Identifier:
+ 08:21:10:B9:3E:A5:AF:63:02:88:F3:9D:77:74:FC:BB:AE:A0:BE:6F
+ X509v3 Authority Key Identifier:
+ keyid:B8:CC:D9:8C:03:C6:06:C3:C4:22:DD:04:64:70:79:0C:93:3F:5C:E8
+
+ Signature Algorithm: sha256WithRSAEncryption
+ 5b:9f:d8:f5:74:e0:66:56:99:62:d8:6f:c0:15:d9:fc:4f:8b:
+ 3d:ab:7a:a5:e0:55:49:62:fc:1f:d3:d1:71:4a:55:e9:a2:03:
+ 7b:57:8f:f2:e4:5b:9c:17:9e:e9:fe:4e:20:a7:48:87:e9:e8:
+ 80:e9:89:3c:4a:94:a2:68:6d:6d:b0:53:e3:9f:a5:dc:b9:cb:
+ 21:c3:b0:9f:1b:e1:32:8b:e3:cb:df:ba:32:bb:f4:fd:ef:83:
+ 9e:64:be:c4:37:4e:c2:90:65:60:3e:19:17:57:7f:59:9c:3d:
+ 8a:4b:4d:c6:42:ad:c4:98:d3:e1:88:74:3d:67:8b:6e:fd:85:
+ 1a:d0:ba:52:bc:24:bd:9e:74:82:d6:5f:8f:c7:2d:d8:04:b9:
+ fa:bd:e7:ef:5b:cf:d4:28:bf:c0:9a:6b:0c:7b:b7:3a:95:91:
+ 1c:f3:ad:5b:ce:48:cf:fa:c1:6e:82:f2:df:bd:ba:51:8e:00:
+ fb:86:b1:a6:a9:6a:5e:e4:e4:17:a2:35:b5:3c:fa:b1:4f:8d:
+ b7:24:53:0f:63:ac:16:f5:91:a0:15:e9:59:cd:59:55:28:a3:
+ d9:c0:70:74:30:5b:01:2a:e4:25:44:36:dd:74:f1:4a:3c:c3:
+ ad:52:51:c1:c7:79:7a:d7:21:23:a0:b6:55:c4:0d:27:40:10:
+ 4f:9c:db:04:f8:37:5a:4b:a1:9b:f2:78:b3:63:1a:c5:e3:6a:
+ a8:6d:c9:d5:73:41:91:c0:49:2c:72:32:43:73:f2:15:3e:c1:
+ 31:5d:91:b9:04:c1:78:a8:4e:cf:34:90:ee:05:f9:e5:ee:21:
+ 4c:1b:ae:55:fd:d8:c9:39:91:4c:5e:61:d9:72:10:a4:24:6a:
+ 20:c6:ad:44:0c:81:7a:ca:d5:fc:1c:6a:bf:52:9d:87:13:47:
+ dd:79:9e:6f:6e:03:be:06:7a:87:c9:5f:2d:f8:9f:c6:44:e6:
+ 05:c0:cd:28:17:2c:09:28:50:2b:12:39:ff:86:85:71:6b:f0:
+ cd:0f:4d:54:89:de:88:ee:fb:e8:e3:ba:45:97:9e:67:d6:ae:
+ 38:54:86:79:ca:fe:99:b4:20:25:d2:30:aa:3a:62:95:0f:dd:
+ 42:00:18:88:c7:1f:42:07:1d:dd:9c:42:c4:2f:56:c5:50:b1:
+ cd:6d:b9:36:df:9f:5d:f5:77:b3:cd:e4:b8:62:ed:2b:50:d0:
+ 0b:a2:31:0c:ae:20:8c:b4:0a:83:1f:20:3f:6c:d6:c7:bc:b6:
+ 84:ae:60:6e:69:2b:cb:01:22:55:a4:e5:3e:62:34:bd:20:f8:
+ 12:13:6f:25:8d:49:88:74:ba:61:51:bc:bc:8a:c6:fb:02:31:
+ ce:5b:85:df:55:d0:55:9b
+-----BEGIN CERTIFICATE-----
+MIIEqTCCApGgAwIBAgIBATANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJBVTEQ
+MA4GA1UECAwHRmxvcmlkYTESMBAGA1UEBwwJUGFsbSBDaXR5MSEwHwYDVQQKDBhJ
+bnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwHhcNMTkwNDAxMTkzMDA3WhcNMjkwMzI5
+MTkzMDA3WjBdMQswCQYDVQQGEwJBVTEQMA4GA1UECAwHRmxvcmlkYTEhMB8GA1UE
+CgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMRkwFwYDVQQDDBB0ZXN0LmV4YW1w
+bGUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArWS1TEC7DwPo
+LaN2rxRJuAZK+UibrfJpVUKwSd7NEMM3cRr44V6IYbPDD3o7PutH03sC+UBtEenG
+0AU8q9JRl6PJXeQxiYUo3ZZ1xxiHDqQmy7xtL0d0iRCgQFw5TsJSvHIlbDBI3FBO
+xxBof5bvFHgFs1NakSqPsF118IW3NG94Q0SmPE2HVtD7z1PeUPincIloUoOHMnDa
+zD/Vrvi0j9neQLeaFcODS2Jz06nm/i5KM38TdhDV1AQYRJy3qBc//ktd1JJen5Vk
+d+8cAQlqoykzCBD6WxyrRRad7pMLkNTqzw4TyHPSKQD6wRDtIGZP9aXPjU4qjkry
+jlnxpbb1hwIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVu
+U1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUCCEQuT6lr2MCiPOd
+d3T8u66gvm8wHwYDVR0jBBgwFoAUuMzZjAPGBsPEIt0EZHB5DJM/XOgwDQYJKoZI
+hvcNAQELBQADggIBAFuf2PV04GZWmWLYb8AV2fxPiz2reqXgVUli/B/T0XFKVemi
+A3tXj/LkW5wXnun+TiCnSIfp6IDpiTxKlKJobW2wU+Ofpdy5yyHDsJ8b4TKL48vf
+ujK79P3vg55kvsQ3TsKQZWA+GRdXf1mcPYpLTcZCrcSY0+GIdD1ni279hRrQulK8
+JL2edILWX4/HLdgEufq95+9bz9Qov8Caawx7tzqVkRzzrVvOSM/6wW6C8t+9ulGO
+APuGsaapal7k5BeiNbU8+rFPjbckUw9jrBb1kaAV6VnNWVUoo9nAcHQwWwEq5CVE
+Nt108Uo8w61SUcHHeXrXISOgtlXEDSdAEE+c2wT4N1pLoZvyeLNjGsXjaqhtydVz
+QZHASSxyMkNz8hU+wTFdkbkEwXioTs80kO4F+eXuIUwbrlX92Mk5kUxeYdlyEKQk
+aiDGrUQMgXrK1fwcar9SnYcTR915nm9uA74GeofJXy34n8ZE5gXAzSgXLAkoUCsS
+Of+GhXFr8M0PTVSJ3oju++jjukWXnmfWrjhUhnnK/pm0ICXSMKo6YpUP3UIAGIjH
+H0IHHd2cQsQvVsVQsc1tuTbfn131d7PN5Lhi7StQ0AuiMQyuIIy0CoMfID9s1se8
+toSuYG5pK8sBIlWk5T5iNL0g+BITbyWNSYh0umFRvLyKxvsCMc5bhd9V0FWb
+-----END CERTIFICATE-----"""
+ #
+ # {'expire_date': datetime.datetime(2029, 3, 29, 19, 30, 7),
+ # 'issuer_dn': [u'AU', u'Florida', u'Palm City', u'Internet Widgits Pty Ltd'],
+ # 'start_date': datetime.datetime(2019, 4, 1, 19, 30, 7),
+ # 'subject_dn': [u'AU', u'Florida', u'Internet Widgits Pty Ltd', u'test.example.com']})
+ #
+ CERTIFICATE_FINGERPRINT = b"4cb68a8039a54b2f5fbe4c55dabb92464a0149a9fce64eb779fd3211c482e44e"
+ GET_CERTIFICATE_RESPONSE_OLD = [
+ {"alias": "f869e886-4262-42de-87a6-8f99fc3e6272",
+ "subjectDN": "CN=test.example.com, O=Internet Widgits Pty Ltd, ST=Florida, C=AU",
+ "issuerDN": "O=Internet Widgits Pty Ltd, L=Palm City, ST=Florida, C=AU",
+ "start": "2019-04-01T19:30:07.000+0000", "expire": "2029-03-29T19:30:07.000+0000", "isUserInstalled": True},
+ {"alias": "ca2", "subjectDN": "sdn2", "issuerDN": "idn2",
+ "start": "2019-04-02T13:07:30.516Z", "expire": "2019-04-02T13:07:30.516Z", "isUserInstalled": False},
+ {"alias": "ca3", "subjectDN": "sdn3", "issuerDN": "idn3",
+ "start": "2019-04-02T13:07:30.516Z", "expire": "2019-04-02T13:07:30.516Z", "isUserInstalled": False},
+ {"alias": "ca4", "subjectDN": "sdn4", "issuerDN": "idn4",
+ "start": "2019-04-02T13:07:30.516Z", "expire": "2019-04-02T13:07:30.516Z", "isUserInstalled": False}]
+ GET_CERTIFICATE_RESPONSE = [
+ {'alias': 'alias1', 'expire': '2019-04-02T13:46:04.285Z', 'isKeyEntry': True, 'isUserInstalled': True,
+ 'issuerDN': 'string', 'issuerRdns': [{'attributes': [{'name': 'string', 'value': 'string'}]}],
+ 'sha256Fingerprint': b'4cb68a8039a54b2f5fbe4c55dabb92464a0149a9fce64eb779fd3211c482e44e',
+ 'shaFingerprint': b'4cb68a8039a54b2f5fbe4c55dabb92464a0149a9fce64eb779fd3211c482e44e',
+ 'start': '2019-04-02T13:46:04.285Z', 'status': 'trusted', 'subjectDN': 'string',
+ 'subjectRdns': [{'attributes': [{'name': 'string', 'value': 'string'}]}], 'truststore': True, 'type': 'selfSigned'},
+ {"alias": "alias1", "shaFingerprint": CERTIFICATE_FINGERPRINT, "sha256Fingerprint": CERTIFICATE_FINGERPRINT,
+ "subjectDN": "string", "subjectRdns": [{"attributes": [{"name": "string", "value": "string"}]}],
+ "issuerDN": "string", "issuerRdns": [{"attributes": [{"name": "string", "value": "string"}]}],
+ "start": "2019-04-02T13:46:04.285Z", "expire": "2019-04-02T13:46:04.285Z", "status": "trusted",
+ "truststore": True, "isUserInstalled": True, "isKeyEntry": True, "type": "selfSigned"},
+ {"alias": "alias1", "shaFingerprint": "123412341234", "sha256Fingerprint": "4567345673456",
+ "subjectDN": "string", "subjectRdns": [{"attributes": [{"name": "string", "value": "string"}]}],
+ "issuerDN": "string", "issuerRdns": [{"attributes": [{"name": "string", "value": "string"}]}],
+ "start": "2019-04-02T13:46:04.285Z", "expire": "2019-04-02T13:46:04.285Z", "status": "trusted",
+ "truststore": True, "isUserInstalled": True, "isKeyEntry": True, "type": "selfSigned"}
+ ]
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
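+ # Materialize the PEM certificate on disk the first time so the module can read it from a real path.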
+ if not os.path.exists(self.CERTIFICATE_PATH):
+ with open(self.CERTIFICATE_PATH, "w") as fh:
+ fh.write(self.CERTIFICATE_CONTENT)
+
+ def test_init_url_path_prefix(self):
+ """Verify url path prefix for both embedded and proxy scenarios."""
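+ # Proxy-managed arrays are addressed through "storage-systems/<ssid>/forward/devmgr/v2/"; ssid "0" or
+ # "PROXY" refers to the proxy itself and embedded systems need no prefix.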
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ self.assertEquals(certificate.url_path_prefix, "")
+
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": True})]):
+ certificate = NetAppESeriesClientCertificate()
+ self.assertEquals(certificate.url_path_prefix, "storage-systems/1/forward/devmgr/v2/")
+
+ self._set_args({"ssid": "0", "certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": True})]):
+ certificate = NetAppESeriesClientCertificate()
+ self.assertEquals(certificate.url_path_prefix, "")
+
+ self._set_args({"ssid": "PROXY", "certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": True})]):
+ certificate = NetAppESeriesClientCertificate()
+ self.assertEquals(certificate.url_path_prefix, "")
+
+ def test_certificate_info_pass(self):
+ """Determine whether certificate_info returns expected results."""
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ self.assertEquals(certificate.certificate_info(self.CERTIFICATE_PATH),
+ {"start_date": datetime.datetime(2019, 4, 1, 19, 30, 7),
+ "expire_date": datetime.datetime(2029, 3, 29, 19, 30, 7),
+ "subject_dn": ["AU", "Florida", "Internet Widgits Pty Ltd", "test.example.com"],
+ "issuer_dn": ["AU", "Florida", "Palm City", "Internet Widgits Pty Ltd"]})
+
+ def test_certificate_info_fail(self):
+ """Determine whether certificate_info throws expected exceptions."""
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to load certificate."):
+ with mock.patch(self.LOAD_PEM_X509_CERTIFICATE, side_effect=Exception()):
+ with mock.patch(self.LOAD_DER_X509_CERTIFICATE, side_effect=Exception()):
+ certificate.certificate_info(self.CERTIFICATE_PATH)
+
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to open certificate file or invalid certificate object type."):
+ with mock.patch(self.LOAD_PEM_X509_CERTIFICATE, return_value=None):
+ certificate.certificate_info(self.CERTIFICATE_PATH)
+
+ def test_certificate_fingerprint_pass(self):
+ """Determine whether certificate_fingerprint returns expected results."""
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ self.assertEquals(certificate.certificate_fingerprint(self.CERTIFICATE_PATH), "4cb68a8039a54b2f5fbe4c55dabb92464a0149a9fce64eb779fd3211c482e44e")
+
+ def test_certificate_fingerprint_fail(self):
+ """Determine whether certificate_fingerprint throws expected exceptions."""
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to determine certificate fingerprint."):
+ with mock.patch(self.LOAD_PEM_X509_CERTIFICATE, side_effect=Exception()):
+ with mock.patch(self.LOAD_DER_X509_CERTIFICATE, side_effect=Exception()):
+ certificate.certificate_fingerprint(self.CERTIFICATE_PATH)
+
+ def test_determine_changes_pass(self):
+ """Determine whether determine_changes successful return expected results."""
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, self.GET_CERTIFICATE_RESPONSE)):
+ certificate.determine_changes()
+ self.assertEquals(certificate.add_certificates, ["certificate.crt"])
+ # self.assertEquals(certificate.remove_certificates, [])
+
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(404, None), (200, self.GET_CERTIFICATE_RESPONSE_OLD)]):
+ certificate.determine_changes()
+ self.assertEquals(certificate.add_certificates, [])
+ # self.assertEquals(certificate.remove_certificates, [])
+
+ self._set_args({"certificates": []})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(404, None), (200, self.GET_CERTIFICATE_RESPONSE_OLD)]):
+ certificate.determine_changes()
+ self.assertEquals(certificate.add_certificates, [])
+ self.assertEquals(certificate.remove_certificates, [self.GET_CERTIFICATE_RESPONSE_OLD[0]])
+
+ def test_determine_changes_fail(self):
+ """Determine whether determine_changes throws expected exceptions."""
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve remote server certificates."):
+ with mock.patch(self.REQUEST_FUNC, return_value=(300, [])):
+ certificate.determine_changes()
+
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve remote server certificates."):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(404, None), (300, [])]):
+ certificate.determine_changes()
+
+ def test_upload_certificate_pass(self):
+ """Validate upload_certificate successfully completes"""
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, [])):
+ certificate.upload_certificate(self.CERTIFICATE_PATH)
+
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(404, None), (200, [])]):
+ certificate.upload_certificate(self.CERTIFICATE_PATH)
+
+ def test_upload_certificate_fail(self):
+ """Validate upload_certificate successfully completes"""
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to upload certificate."):
+ with mock.patch(self.REQUEST_FUNC, return_value=(300, [])):
+ certificate.upload_certificate(self.CERTIFICATE_PATH)
+
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to upload certificate."):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(404, None), (300, [])]):
+ certificate.upload_certificate(self.CERTIFICATE_PATH)
+
+ def test_delete_certificate_pass(self):
+ """Validate delete_certificate successfully completes"""
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, [])):
+ certificate.delete_certificate({"alias": "alias1"})
+
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(404, None), (200, [])]):
+ certificate.delete_certificate({"alias": "alias1"})
+
+ def test_delete_certificate_fail(self):
+ """Validate delete_certificate successfully completes"""
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to delete certificate."):
+ with mock.patch(self.REQUEST_FUNC, return_value=(300, [])):
+ certificate.delete_certificate({"alias": "alias1"})
+
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to delete certificate."):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(404, None), (300, [])]):
+ certificate.delete_certificate({"alias": "alias1"})
+
+ def test_apply_pass(self):
+ """Verify apply functions as expected."""
+ self._set_args({"certificates": [self.CERTIFICATE_PATH]})
+ with mock.patch(self.BASE_REQUEST_FUNC, side_effect=[(200, {"version": "03.00.0000.0000"}), (200, {"runningAsProxy": False})]):
+ certificate = NetAppESeriesClientCertificate()
+ certificate.determine_changes = lambda: None
+ certificate.delete_certificate = lambda x: None
+ certificate.upload_certificate = lambda x: None
+
+ certificate.remove_certificates = []
+ certificate.add_certificates = []
+ certificate.module.check_mode = False
+ with self.assertRaises(AnsibleExitJson):
+ certificate.apply()
+
+ certificate.remove_certificates = []
+ certificate.add_certificates = []
+ certificate.module.check_mode = True
+ with self.assertRaises(AnsibleExitJson):
+ certificate.apply()
+
+ certificate.remove_certificates = [True]
+ certificate.add_certificates = []
+ certificate.module.check_mode = False
+ with self.assertRaises(AnsibleExitJson):
+ certificate.apply()
+
+ certificate.remove_certificates = []
+ certificate.add_certificates = [True]
+ certificate.module.check_mode = False
+ with self.assertRaises(AnsibleExitJson):
+ certificate.apply()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_discover.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_discover.py
new file mode 100644
index 000000000..5dc390ede
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_discover.py
@@ -0,0 +1,168 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_discover import NetAppESeriesDiscover
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class DiscoverTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"subnet_mask": "192.168.1.0/24"}
+ BASE_REQ_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_discover.request'
+ SLEEP_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_discover.sleep'
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_valid_options_pass(self):
+ """Verify constructor accepts valid options."""
+ options_list = [{"ports": [1, 8443]},
+ {"ports": [8080, 65535]},
+ {"ports": [8443], "proxy_url": "https://192.168.1.1:8443/devmgr/v2/", "proxy_username": "admin", "proxy_password": "adminpass"},
+ {"ports": [8443], "proxy_url": "https://192.168.1.1:8443/devmgr/v2/", "proxy_username": "admin", "proxy_password": "adminpass",
+ "prefer_embedded": True},
+ {"ports": [8443], "proxy_url": "https://192.168.1.1:8443/devmgr/v2/", "proxy_username": "admin", "proxy_password": "adminpass",
+ "prefer_embedded": False},
+ {"ports": [8443], "proxy_url": "https://192.168.1.1:8443/devmgr/v2/", "proxy_username": "admin", "proxy_password": "adminpass",
+ "proxy_validate_certs": True},
+ {"ports": [8443], "proxy_url": "https://192.168.1.1:8443/devmgr/v2/", "proxy_username": "admin", "proxy_password": "adminpass",
+ "proxy_validate_certs": False}]
+
+ for options in options_list:
+ self._set_args(options)
+ discover = NetAppESeriesDiscover()
+
+ def test_invalid_options_fail(self):
+ """Verify constructor throws expected exceptions."""
+ options_list = [{"ports": [0, 8443]}, {"ports": [8080, 65536]}, {"ports": [8080, "port"]}, {"ports": [8080, -10]}, {"ports": [8080, 70000]}]
+
+ for options in options_list:
+ self._set_args(options)
+ with self.assertRaisesRegexp(AnsibleFailJson, "Invalid port! Ports must be positive numbers between 0 and 65536."):
+ discover = NetAppESeriesDiscover()
+
+ def test_check_ip_address_pass(self):
+ """Verify check_ip_address successfully completes."""
+ self._set_args()
+ with mock.patch(self.BASE_REQ_FUNC, return_value=(200, {"chassisSerialNumber": "012345678901", "storageArrayLabel": "array_label"})):
+ discover = NetAppESeriesDiscover()
+ discover.check_ip_address(discover.systems_found, "192.168.1.100")
+ self.assertEqual(discover.systems_found, {"012345678901": {"api_urls": ["https://192.168.1.100:8443/devmgr/v2/storage-systems/1/"],
+ "label": "array_label", "addresses": [], "proxy_required": False}})
+
+ self._set_args({"ports": [8080, 8443]})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(404, None), (401, None), (200, {"sa": {"saData": {"chassisSerialNumber": "012345678901",
+ "storageArrayLabel": "array_label"}}})]):
+ discover = NetAppESeriesDiscover()
+ discover.check_ip_address(discover.systems_found, "192.168.1.101")
+ self.assertEqual(discover.systems_found, {"012345678901": {"api_urls": ["https://192.168.1.101:8443/devmgr/v2/storage-systems/1/"],
+ "label": "array_label", "addresses": [], "proxy_required": False}})
+
+ def test_no_proxy_discover_pass(self):
+ """Verify no_proxy_discover completes successfully."""
+ self._set_args()
+ discover = NetAppESeriesDiscover()
+ discover.check_ip_address = lambda: None
+ discover.no_proxy_discover()
+
+ def test_verify_proxy_service_pass(self):
+ """Verify verify_proxy_service completes successfully."""
+ self._set_args({"proxy_url": "https://192.168.1.200", "proxy_username": "admin", "proxy_password": "adminpass"})
+ discover = NetAppESeriesDiscover()
+ with mock.patch(self.BASE_REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
+ discover.verify_proxy_service()
+
+ def test_verify_proxy_service_fail(self):
+ """Verify verify_proxy_service throws expected exception."""
+ self._set_args({"proxy_url": "https://192.168.1.200", "proxy_username": "admin", "proxy_password": "adminpass"})
+ discover = NetAppESeriesDiscover()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Web Services is not running as a proxy!"):
+ with mock.patch(self.BASE_REQ_FUNC, return_value=(200, {"runningAsProxy": False})):
+ discover.verify_proxy_service()
+
+ self._set_args({"proxy_url": "https://192.168.1.200", "proxy_username": "admin", "proxy_password": "adminpass"})
+ discover = NetAppESeriesDiscover()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Proxy is not available! Check proxy_url."):
+ with mock.patch(self.BASE_REQ_FUNC, return_value=Exception()):
+ discover.verify_proxy_service()
+
+ def test_test_systems_found_pass(self):
+ """Verify test_systems_found adds to systems_found dictionary."""
+ self._set_args({"proxy_url": "https://192.168.1.200", "proxy_username": "admin", "proxy_password": "adminpass", "prefer_embedded": True})
+ discover = NetAppESeriesDiscover()
+ with mock.patch(self.BASE_REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
+ discover.test_systems_found(discover.systems_found, "012345678901", "array_label", ["192.168.1.100", "192.168.1.102"])
+ self.assertEqual(discover.systems_found, {"012345678901": {"api_urls": ["https://192.168.1.100:8443/devmgr/v2/",
+ "https://192.168.1.102:8443/devmgr/v2/"],
+ "label": "array_label",
+ "addresses": ["192.168.1.100", "192.168.1.102"],
+ "proxy_required": False}})
+
+ def test_proxy_discover_pass(self):
+ """Verify proxy_discover completes successfully."""
+ self._set_args({"subnet_mask": "192.168.1.0/30", "proxy_url": "https://192.168.1.200", "proxy_username": "admin", "proxy_password": "adminpass"})
+ discover = NetAppESeriesDiscover()
+ discover.verify_proxy_service = lambda: None
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"requestId": "1"}), (200, {"discoverProcessRunning": True}),
+ (200, {"discoverProcessRunning": False,
+ "storageSystems": [{"controllers": [{"ipAddresses": ["192.168.1.100", "192.168.1.102"]}],
+ "supportedManagementPorts": ["https"], "serialNumber": "012345678901",
+ "label": "array_label"}]})]):
+ discover.proxy_discover()
+
+ self._set_args({"subnet_mask": "192.168.1.0/30", "proxy_url": "https://192.168.1.200", "proxy_username": "admin", "proxy_password": "adminpass"})
+ discover = NetAppESeriesDiscover()
+ discover.verify_proxy_service = lambda: None
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"requestId": "1"}), (200, {"discoverProcessRunning": True}),
+ (200, {"discoverProcessRunning": False,
+ "storageSystems": [{"controllers": [{"ipAddresses": ["192.168.1.100", "192.168.1.102"]}],
+ "supportedManagementPorts": [], "serialNumber": "012345678901",
+ "label": "array_label"}]})]):
+ discover.proxy_discover()
+
+ def test_proxy_discover_fail(self):
+ """Verify proxy_discover throws expected exceptions."""
+ self._set_args({"subnet_mask": "192.168.1.0/30", "proxy_url": "https://192.168.1.200", "proxy_username": "admin", "proxy_password": "adminpass"})
+ discover = NetAppESeriesDiscover()
+ discover.verify_proxy_service = lambda: None
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to initiate array discovery."):
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ with mock.patch(self.BASE_REQ_FUNC, return_value=Exception()):
+ discover.proxy_discover()
+
+ self._set_args({"subnet_mask": "192.168.1.0/30", "proxy_url": "https://192.168.1.200", "proxy_username": "admin", "proxy_password": "adminpass"})
+ discover = NetAppESeriesDiscover()
+ discover.verify_proxy_service = lambda: None
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to get the discovery results."):
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"requestId": "1"}), Exception()]):
+ discover.proxy_discover()
+
+ self._set_args({"subnet_mask": "192.168.1.0/30", "proxy_url": "https://192.168.1.200", "proxy_username": "admin", "proxy_password": "adminpass"})
+ discover = NetAppESeriesDiscover()
+ discover.verify_proxy_service = lambda: None
+ with self.assertRaisesRegexp(AnsibleFailJson, "Timeout waiting for array discovery process."):
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"requestId": "1"})] + [(200, {"discoverProcessRunning": True})] * 300):
+ discover.proxy_discover()
+
+ def test_discover_pass(self):
+ """Verify discover successfully completes."""
+ self._set_args({"subnet_mask": "192.168.1.0/30", "proxy_url": "https://192.168.1.200", "proxy_username": "admin", "proxy_password": "adminpass"})
+ discover = NetAppESeriesDiscover()
+ discover.proxy_discover = lambda: None
+ with self.assertRaisesRegexp(AnsibleExitJson, "Discover process complete."):
+ discover.discover()
+
+ self._set_args()
+ discover = NetAppESeriesDiscover()
+ discover.no_proxy_discover = lambda: None
+ with self.assertRaisesRegexp(AnsibleExitJson, "Discover process complete."):
+ discover.discover()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_drive_firmware.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_drive_firmware.py
new file mode 100644
index 000000000..b59bd0acd
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_drive_firmware.py
@@ -0,0 +1,212 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_drive_firmware import NetAppESeriesDriveFirmware
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class DriveFirmwareTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"api_username": "rw",
+ "api_password": "password",
+ "api_url": "http://localhost",
+ "ssid": "1"}
+
+ REQUEST_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_drive_firmware.NetAppESeriesDriveFirmware.request"
+ CREATE_MULTIPART_FORMDATA_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_drive_firmware.create_multipart_formdata"
+ SLEEP_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_drive_firmware.sleep"
+ UPGRADE_LIST_RESPONSE = ({"filename": "test_drive_firmware_1",
+ "driveRefList": ["010000005000C5007EDE4ECF0000000000000000",
+ "010000005000C5007EDF9AAB0000000000000000",
+ "010000005000C5007EDBE3C70000000000000000"]},
+ {"filename": "test_drive_firmware_2",
+ "driveRefList": ["010000005000C5007EDE4ECF0000000000000001",
+ "010000005000C5007EDF9AAB0000000000000001",
+ "010000005000C5007EDBE3C70000000000000001"]})
+
+ FIRMWARE_DRIVES_RESPONSE = {"compatibilities": [
+ {"filename": "test_drive_firmware_1",
+ "firmwareVersion": "MS02",
+ "supportedFirmwareVersions": ["MSB6", "MSB8", "MS00", "MS02"],
+ "compatibleDrives": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "onlineUpgradeCapable": True},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "onlineUpgradeCapable": True},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000000", "onlineUpgradeCapable": True}]},
+ {"filename": "test_drive_firmware_2",
+ "firmwareVersion": "MS01",
+ "supportedFirmwareVersions": ["MSB8", "MS00", "MS01"],
+ "compatibleDrives": [{"driveRef": "010000005000C5007EDE4ECF0000000000000001", "onlineUpgradeCapable": True},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "onlineUpgradeCapable": False},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000001", "onlineUpgradeCapable": True}]}]}
+
+ def _set_args(self, args):
+ module_args = self.REQUIRED_PARAMS.copy()
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_upload_firmware(self):
+ """Verify exception is thrown"""
+ self._set_args({"firmware": ["path_to_test_drive_firmware_1", "path_to_test_drive_firmware_2"]})
+ firmware_object = NetAppESeriesDriveFirmware()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to upload drive firmware"):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ with mock.patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
+ firmware_object.upload_firmware()
+
+ def test_upgrade_list_pass(self):
+ """Verify upgrade_list method pass"""
+ side_effects = [(200, self.FIRMWARE_DRIVES_RESPONSE),
+ (200, {"offline": False, "available": True, "firmwareVersion": "MS00"}),
+ (200, {"offline": False, "available": True, "firmwareVersion": "MS01"}),
+ (200, {"offline": False, "available": True, "firmwareVersion": "MS02"})]
+ self._set_args({"firmware": ["path/to/test_drive_firmware_1"]})
+ firmware_object = NetAppESeriesDriveFirmware()
+ with mock.patch(self.REQUEST_FUNC, side_effect=side_effects):
+ self.assertEqual(firmware_object.upgrade_list(), [{"driveRefList": ["010000005000C5007EDE4ECF0000000000000000",
+ "010000005000C5007EDF9AAB0000000000000000"],
+ "filename": "test_drive_firmware_1"}])
+
+ side_effects = [(200, self.FIRMWARE_DRIVES_RESPONSE),
+ (200, {"offline": False, "available": True, "firmwareVersion": "MS02"}),
+ (200, {"offline": False, "available": True, "firmwareVersion": "MS02"}),
+ (200, {"offline": False, "available": True, "firmwareVersion": "MS02"})]
+ self._set_args({"firmware": ["path/to/test_drive_firmware_1"]})
+ firmware_object = NetAppESeriesDriveFirmware()
+ with mock.patch(self.REQUEST_FUNC, side_effect=side_effects):
+ self.assertEqual(firmware_object.upgrade_list(), [])
+
+ def test_upgrade_list_fail(self):
+ """Verify upgrade_list method throws expected exceptions."""
+ self._set_args({"firmware": ["path_to_test_drive_firmware_1"]})
+ firmware_object = NetAppESeriesDriveFirmware()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to complete compatibility and health check."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ firmware_object.upgrade_list()
+
+ side_effects = [(200, self.FIRMWARE_DRIVES_RESPONSE),
+ (200, {"offline": False, "available": True, "firmwareVersion": "MS01"}),
+ (200, {"offline": False, "available": True, "firmwareVersion": "MS00"}),
+ Exception()]
+ self._set_args({"firmware": ["path/to/test_drive_firmware_1"]})
+ firmware_object = NetAppESeriesDriveFirmware()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve drive information."):
+ with mock.patch(self.REQUEST_FUNC, side_effect=side_effects):
+ firmware_object.upgrade_list()
+
+ side_effects = [(200, self.FIRMWARE_DRIVES_RESPONSE),
+ (200, {"offline": False, "available": True, "firmwareVersion": "MS01"}),
+ (200, {"offline": False, "available": True, "firmwareVersion": "MS00"}),
+ (200, {"offline": False, "available": True, "firmwareVersion": "MS00"})]
+ self._set_args({"firmware": ["path/to/test_drive_firmware_2"], "upgrade_drives_online": True})
+ firmware_object = NetAppESeriesDriveFirmware()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Drive is not capable of online upgrade."):
+ with mock.patch(self.REQUEST_FUNC, side_effect=side_effects):
+ firmware_object.upgrade_list()
+
+ def test_wait_for_upgrade_completion_pass(self):
+ """Verify function waits for okay status."""
+ self._set_args({"firmware": ["path/to/test_drive_firmware_1", "path/to/test_drive_firmware_2"], "wait_for_completion": True})
+ firmware_object = NetAppESeriesDriveFirmware()
+ firmware_object.upgrade_drives_online = True
+ firmware_object.upgrade_list = lambda: self.UPGRADE_LIST_RESPONSE
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[
+ (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "inProgress"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "okay"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]}),
+ (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "inProgressRecon"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "okay"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]}),
+ (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "pending"},
+ {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "okay"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]}),
+ (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "notAttempted"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]}),
+ (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "okay"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]})]):
+ firmware_object.wait_for_upgrade_completion()
+
+ def test_wait_for_upgrade_completion_fail(self):
+ """Verify wait for upgrade completion exceptions."""
+ self._set_args({"firmware": ["path/to/test_drive_firmware_1", "path/to/test_drive_firmware_2"], "wait_for_completion": True})
+ firmware_object = NetAppESeriesDriveFirmware()
+ firmware_object.upgrade_drives_online = True
+ firmware_object.upgrade_list = lambda: self.UPGRADE_LIST_RESPONSE
+ firmware_object.WAIT_TIMEOUT_SEC = 5
+ response = (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "inProgress"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "inProgressRecon"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "pending"},
+ {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "notAttempted"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]})
+ with self.assertRaisesRegexp(AnsibleFailJson, "Timed out waiting for drive firmware upgrade."):
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, return_value=response):
+ firmware_object.wait_for_upgrade_completion()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve drive status."):
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ firmware_object.wait_for_upgrade_completion()
+
+ response = (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "_UNDEFINED"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "inProgressRecon"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "pending"},
+ {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "notAttempted"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]})
+ with self.assertRaisesRegexp(AnsibleFailJson, "Drive firmware upgrade failed."):
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, return_value=response):
+ firmware_object.wait_for_upgrade_completion()
+
+ def test_upgrade_pass(self):
+ """Verify upgrade upgrade in progress variable properly reports."""
+ self._set_args({"firmware": ["path/to/test_drive_firmware_1", "path/to/test_drive_firmware_2"], "wait_for_completion": False})
+ firmware_object = NetAppESeriesDriveFirmware()
+ firmware_object.upgrade_drives_online = True
+ firmware_object.upgrade_list = lambda: {}
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
+ firmware_object.upgrade()
+ self.assertTrue(firmware_object.upgrade_in_progress)
+
+ self._set_args({"firmware": ["path_to_test_drive_firmware_1", "path_to_test_drive_firmware_2"], "wait_for_completion": True})
+ firmware_object = NetAppESeriesDriveFirmware()
+ firmware_object.upgrade_drives_online = True
+ firmware_object.upgrade_list = lambda: self.UPGRADE_LIST_RESPONSE
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, {}),
+ (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "okay"},
+ {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "okay"},
+ {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
+ {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]})]):
+ firmware_object.upgrade()
+ self.assertFalse(firmware_object.upgrade_in_progress)
+
+ def test_upgrade_fail(self):
+ """Verify upgrade method exceptions."""
+ self._set_args({"firmware": ["path_to_test_drive_firmware_1", "path_to_test_drive_firmware_2"]})
+ firmware_object = NetAppESeriesDriveFirmware()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to upgrade drive firmware."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ firmware_object.upgrade()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_facts.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_facts.py
new file mode 100644
index 000000000..d3d094278
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_facts.py
@@ -0,0 +1,470 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_facts import Facts
+from units.modules.utils import AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class FactsTest(ModuleTestCase):
+ REQUIRED_PARAMS = {
+ 'api_username': 'rw',
+ 'api_password': 'password',
+ 'api_url': 'http://localhost',
+ 'ssid': '1'
+ }
+ REQUEST_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_facts.Facts.request'
+ GET_CONTROLLERS_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_facts.Facts.get_controllers'
+ WORKLOAD_RESPONSE = [{"id": "4200000001000000000000000000000000000000", "name": "beegfs_metadata",
+ "workloadAttributes": [{"key": "profileId", "value": "ansible_workload_1"}]},
+ {"id": "4200000002000000000000000000000000000000", "name": "other_workload_1",
+ "workloadAttributes": [{"key": "profileId", "value": "Other_1"}]}]
+ GRAPH_RESPONSE = {
+ "sa": {"saData": {"storageArrayLabel": "ictm0718s01c1", "saId": {"worldWideName": "600A098000A4B28D000000005CF10481"}, "fwVersion": "08.42.30.05",
+ "chassisSerialNumber": "021633035190"},
+ "featureParameters": {"cacheBlockSizes": [4096, 8192, 16384, 32768],
+ "supportedSegSizes": [32768, 65536, 131072, 262144, 524288, 495616, 655360, 1982464]},
+ "capabilities": ["autoCodeSync", "autoLunTransfer", "subLunsAllowed", "stagedDownload", "mixedDriveTypes", "bundleMigration", "raid6",
+ "performanceTier", "secureVolume", "protectionInformation", "ssdSupport", "driveSlotLimit", "flashReadCache",
+ "storagePoolsType2", "totalNumberOfArvmMirrorsPerArray", "totalNumberOfPitsPerArray", "totalNumberOfThinVolumesPerArray"],
+ "premiumFeatures": [],
+ "hostSpecificVals": [{"hostType": "FactoryDefault", "index": 0}, {"hostType": "W2KNETNCL", "index": 1}, {"hostPortType": "W2KNETCL", "index": 8},
+ {"hostType": "LnxTPGSALUA_SF", "index": 27}, {"hostType": "LnxDHALUA", "index": 28}]}, "controller": [
+ {"active": True, "quiesced": False, "status": "optimal", "controllerRef": "070000000000000000000001",
+ "physicalLocation": {"trayRef": "0E00000000000000000000000000000000000000", "slot": 1,
+ "locationParent": {"refType": "generic", "controllerRef": None, "symbolRef": "0000000000000000000000000000000000000000",
+ "typedReference": None}, "locationPosition": 1, "label": "A"}, "manufacturer": "NETAPP ",
+ "manufacturerDate": "1474675200", "appVersion": "08.42.30.05", "bootVersion": "08.42.30.05", "productID": "INF-01-00 ",
+ "productRevLevel": "0842", "serialNumber": "021619039162 ", "boardID": "2806", "cacheMemorySize": 3328, "processorMemorySize": 1278,
+ "hostInterfaces": [{"interfaceType": "iscsi", "fibre": None, "ib": None,
+ "iscsi": {"channel": 1, "channelPortRef": "1F00010001010000000000000000000000000000", "tcpListenPort": 3260,
+ "ipv4Enabled": True, "ipv4Data": {"ipv4Address": "0.0.0.0", "ipv4AddressConfigMethod": "configStatic",
+ "ipv4OutboundPacketPriority": {"isEnabled": False, "value": 1},
+ "ipv4VlanId": {"isEnabled": False, "value": 1},
+ "ipv4AddressData": {"configState": "configured", "ipv4Address": "10.10.11.110",
+ "ipv4SubnetMask": "255.255.255.0",
+ "ipv4GatewayAddress": "0.0.0.0"}},
+ "interfaceData": {"type": "ethernet", "ethernetData": {
+ "partData": {"vendorName": "QLogic Corporation", "partNumber": "83xx", "revisionNumber": "5.5.31.511",
+ "serialNumber": "00a098a4b28f"}, "macAddress": "00A098A4B293", "fullDuplex": True,
+ "maximumFramePayloadSize": 9000, "currentInterfaceSpeed": "speed10gig", "maximumInterfaceSpeed": "speed10gig",
+ "linkStatus": "up", "supportedInterfaceSpeeds": ["speed1gig", "speed10gig"], "autoconfigSupport": False,
+ "copperCableDiagnosticsSupport": False}, "infinibandData": None},
+ "interfaceRef": "2201020000000000000000000000000000000000", "ipv6Enabled": True,
+ "ipv6Data": {"ipv6LocalAddresses": [
+ {"address": "FE8000000000000002A098FFFEA4B293",
+ "addressState": {"addressType": "typeInterface", "interfaceAddressState": "configured",
+ "routerAddressState": "__UNDEFINED"}}], "ipv6RoutableAddresses": [
+ {"address": "00000000000000000000000000000000",
+ "addressState": {"addressType": "typeInterface", "interfaceAddressState": "unconfigured",
+ "routerAddressState": "__UNDEFINED"}},
+ {"address": "00000000000000000000000000000000",
+ "addressState": {"addressType": "typeInterface", "interfaceAddressState": "unconfigured",
+ "routerAddressState": "__UNDEFINED"}}],
+ "ipv6PortRouterAddress": {"address": "00000000000000000000000000000000",
+ "addressState": {"addressType": "typeRouter", "interfaceAddressState": "__UNDEFINED",
+ "routerAddressState": "unknown"}},
+ "ipv6AddressConfigMethod": "configStateless", "ipv6OutboundPacketPriority": {"isEnabled": False, "value": 1},
+ "ipv6VlanId": {"isEnabled": False, "value": 1}, "ipv6HopLimit": 64, "ipv6NdReachableTime": 30000,
+ "ipv6NdRetransmitTime": 1000, "ipv6NdStaleTimeout": 30000, "ipv6DuplicateAddressDetectionAttempts": 1},
+ "physicalLocation": {"trayRef": "0000000000000000000000000000000000000000", "slot": 0,
+ "locationParent": {"refType": "generic", "controllerRef": None,
+ "symbolRef": "0000000000000000000000000000000000000000",
+ "typedReference": None}, "locationPosition": 0, "label": ""},
+ "protectionInformationCapable": True, "isIPv6Capable": True, "oneWayMaxRate": "1230000000",
+ "bidirectionalMaxRate": "2120000000", "iqn": "iqn.1992-08.com.netapp:2806.600a098000a4b28d000000005cf10481",
+ "controllerId": "070000000000000000000001",
+ "addressId": "iqn.1992-08.com.netapp:2806.600a098000a4b28d000000005cf10481",
+ "niceAddressId": "iqn.1992-08.com.netapp:2806.600a098000a4b28d000000005cf10481",
+ "interfaceId": "2201020000000000000000000000000000000000", "id": "2201020000000000000000000000000000000000"},
+ "sas": None, "sata": None, "scsi": None}],
+ "driveInterfaces": [
+ {"interfaceType": "sas", "fibre": None, "ib": None, "iscsi": None,
+ "sas": {"channel": 1, "currentInterfaceSpeed": "speed12gig", "maximumInterfaceSpeed": "speed12gig", "part": "LSISAS3008",
+ "revision": 172688896, "isDegraded": False,
+ "iocPort": {
+ "parent": {"type": "controller", "controller": "070000000000000000000001", "drive": None, "expander": None, "hostBoardRef": None},
+ "attachedDevice": {"channel": 1, "channelType": "driveside",
+ "sasAttachedDeviceData": {"type": "expander", "alternateController": None, "drive": None,
+ "expander": "2000000000000000000000630001000000000000",
+ "remoteHostPortAddress": None,
+ "localController": None, "physicalLocation": None}}, "state": "optimal",
+ "miswireType": "None", "channelPortRef": "1F01000001010000000000000000000000000000",
+ "sasPhys": [{"phyIdentifier": 4, "isOperational": True}, {"phyIdentifier": 5, "isOperational": True},
+ {"phyIdentifier": 6, "isOperational": True}, {"phyIdentifier": 7, "isOperational": True}],
+ "portTypeData": {"portType": "endDevice", "portIdentifier": "500A098A4B28D004", "routingType": "__UNDEFINED"},
+ "portMode": "internal",
+ "domainNumber": 1, "attachedChannelPortRef": "0000000000000000000000000000000000000000", "discoveryStatus": 0},
+ "interfaceRef": "2201000000000000000000000000000000000000",
+ "physicalLocation": {"trayRef": "0000000000000000000000000000000000000000", "slot": 0,
+ "locationParent": {"refType": "generic", "controllerRef": None,
+ "symbolRef": "0000000000000000000000000000000000000000", "typedReference": None},
+ "locationPosition": 0, "label": ""}, "protectionInformationCapable": True, "oneWayMaxRate": "4400000000",
+ "bidirectionalMaxRate": "8400000000", "controllerId": None, "addressId": "500A098A4B28D004", "niceAddressId": "500A098A4B28D004",
+ "interfaceId": "2201000000000000000000000000000000000000", "basePortAddress": "500A098A4B28D00",
+ "id": "2201000000000000000000000000000000000000"}, "sata": None, "scsi": None}],
+ "netInterfaces": [{"interfaceType": "ethernet",
+ "ethernet": {"interfaceName": "wan0", "channel": 1, "speed": 1000, "ip": 175178176, "alias": "ictm0718s01c1-a",
+ "macAddr": "00A098A4B28D", "gatewayIp": 175177985, "subnetMask": -256, "bootpUsed": False, "rloginEnabled": True,
+ "reserved1": "0000000000000000", "setupError": False, "reserved2": "",
+ "interfaceRef": "2800070000000000000000000001000000000000", "linkStatus": "up", "ipv4Enabled": True,
+ "ipv4Address": "10.113.1.192", "ipv4SubnetMask": "255.255.255.0", "ipv4AddressConfigMethod": "configStatic",
+ "ipv6Enabled": False, "ipv6LocalAddress": {"address": "00000000000000000000000000000000",
+ "addressState": {"addressType": "typeInterface",
+ "interfaceAddressState": "configured",
+ "routerAddressState": "__UNDEFINED"}},
+ "ipv6PortStaticRoutableAddress": {"address": "00000000000000000000000000000000",
+ "addressState": {"addressType": "typeInterface",
+ "interfaceAddressState": "__UNDEFINED",
+ "routerAddressState": "__UNDEFINED"}},
+ "ipv6PortRoutableAddresses": [], "ipv6AddressConfigMethod": "configStatic", "fullDuplex": True,
+ "supportedSpeedSettings": ["speedAutoNegotiated", "speed10MbitHalfDuplex", "speed10MbitFullDuplex",
+ "speed100MbitHalfDuplex", "speed100MbitFullDuplex", "speed1000MbitFullDuplex"],
+ "configuredSpeedSetting": "speedAutoNegotiated", "currentSpeed": "speed1gig",
+ "physicalLocation": {"trayRef": "0E00000000000000000000000000000000000000", "slot": 0,
+ "locationParent": {"refType": "controller", "controllerRef": "070000000000000000000001",
+ "symbolRef": None, "typedReference": None}, "locationPosition": 1,
+ "label": "P1"}, "ipv4GatewayAddress": "10.113.1.1",
+ "controllerRef": "070000000000000000000001", "controllerSlot": 1,
+ "dnsProperties": {
+ "acquisitionProperties": {"dnsAcquisitionType": "stat",
+ "dnsServers": [
+ {"addressType": "ipv4", "ipv4Address": "10.193.0.250", "ipv6Address": None},
+ {"addressType": "ipv4", "ipv4Address": "10.192.0.250", "ipv6Address": None}]},
+ "dhcpAcquiredDnsServers": []},
+ "ntpProperties": {
+ "acquisitionProperties": {"ntpAcquisitionType": "stat", "ntpServers": [
+ {"addrType": "ipvx", "domainName": None,
+ "ipvxAddress": {"addressType": "ipv4", "ipv4Address": "216.239.35.0", "ipv6Address": None}},
+ {"addrType": "ipvx", "domainName": None,
+ "ipvxAddress": {"addressType": "ipv4", "ipv4Address": "216.239.35.4", "ipv6Address": None}}]},
+ "dhcpAcquiredNtpServers": []},
+ "id": "2800070000000000000000000001000000000000"}}],
+ "inventory": [], "reserved1": "000000000000000000000000", "reserved2": "", "hostBoardID": "None", "physicalCacheMemorySize": 4864,
+ "readyToRemove": False, "boardSubmodelID": "319", "submodelSupported": True, "oemPartNumber": "E2800A-8GB", "partNumber": "111-02829+C0 ",
+ "rtrAttributes": {"cruType": "dedicated", "parentCru": None, "rtrAttributeData": {"hasReadyToRemoveIndicator": False, "readyToRemove": False}},
+ "bootTime": "1563988406", "modelName": "2806",
+ "networkSettings": {"ipv4DefaultRouterAddress": "10.113.1.1",
+ "ipv6DefaultRouterAddress": {"address": "00000000000000000000000000000000",
+ "addressState": {"addressType": "typeInterface",
+ "interfaceAddressState": "__UNDEFINED", "routerAddressState": "__UNDEFINED"}},
+ "ipv6CandidateDefaultRouterAddresses": [],
+ "remoteAccessEnabled": True,
+ "dnsProperties": {"acquisitionProperties": {"dnsAcquisitionType": "stat",
+ "dnsServers": [
+ {"addressType": "ipv4", "ipv4Address": "10.193.0.250", "ipv6Address": None},
+ {"addressType": "ipv4", "ipv4Address": "10.192.0.250", "ipv6Address": None}]},
+ "dhcpAcquiredDnsServers": []},
+ "ntpProperties": {
+ "acquisitionProperties": {
+ "ntpAcquisitionType": "stat", "ntpServers": [
+ {"addrType": "ipvx", "domainName": None,
+ "ipvxAddress": {"addressType": "ipv4", "ipv4Address": "216.239.35.0", "ipv6Address": None}},
+ {"addrType": "ipvx", "domainName": None,
+ "ipvxAddress": {"addressType": "ipv4", "ipv4Address": "216.239.35.4", "ipv6Address": None}}]},
+ "dhcpAcquiredNtpServers": []}},
+ "repairPolicy": {"removalData": {"removalMethod": "__UNDEFINED", "rtrAttributes": None}, "replacementMethod": "__UNDEFINED"},
+ "flashCacheMemorySize": 419430400, "ctrlIocDumpData": {"iocDumpNeedsRetrieved": False, "iocDumpTag": 0, "timeStamp": "0"},
+ "locateInProgress": False, "hasTrayIdentityIndicator": False, "controllerErrorMode": "notInErrorMode",
+ "codeVersions": [{"codeModule": "raid", "versionString": "08.42.30.05"}, {"codeModule": "hypervisor", "versionString": "08.42.30.05"},
+ {"codeModule": "management", "versionString": "11.42.0000.0026"}, {"codeModule": "iom", "versionString": "11.42.0G00.0001"},
+ {"codeModule": "bundle", "versionString": "08.42.30.05"}, {"codeModule": "bundleDisplay", "versionString": "11.40.3R2"}],
+ "id": "070000000000000000000001"}],
+ "drive": [{"offline": False, "hotSpare": False, "invalidDriveData": False, "available": True, "pfa": False,
+ "driveRef": "0100000050000396AC882ED10000000000000000", "status": "optimal", "cause": "None",
+ "interfaceType": {"driveType": "sas", "fibre": None,
+ "sas": {"deviceName": "50000396AC882ED1",
+ "drivePortAddresses": [{"channel": 2, "portIdentifier": "50000396AC882ED3"},
+ {"channel": 1, "portIdentifier": "50000396AC882ED2"}]},
+ "scsi": None},
+ "physicalLocation": {"trayRef": "0E00000000000000000000000000000000000000", "slot": 6,
+ "locationParent": {"refType": "genericTyped", "controllerRef": None, "symbolRef": None,
+ "typedReference": {"componentType": "tray",
+ "symbolRef": "0E00000000000000000000000000000000000000"}},
+ "locationPosition": 6, "label": "5"}, "manufacturer": "TOSHIBA ",
+ "manufacturerDate": "1447200000", "productID": "PX04SVQ160 ", "serialNumber": "Y530A001T5MD", "softwareVersion": "MSB6", "blkSize": 512,
+ "usableCapacity": "1599784443904", "rawCapacity": "1600321314816", "worldWideName": "50000396AC882ED10000000000000000",
+ "currentVolumeGroupRef": "0000000000000000000000000000000000000000", "sparedForDriveRef": "0000000000000000000000000000000000000000",
+ "mirrorDrive": "0000000000000000000000000000000000000000", "nonRedundantAccess": False, "workingChannel": -1, "volumeGroupIndex": -1,
+ "currentSpeed": "speed12gig", "maxSpeed": "speed12gig", "uncertified": False, "hasDegradedChannel": False, "degradedChannels": [],
+ "phyDriveType": "sas", "spindleSpeed": 0, "rtrAttributes": {"cruType": "dedicated", "parentCru": None,
+ "rtrAttributeData": {"hasReadyToRemoveIndicator": False,
+ "readyToRemove": False}}, "reserved": "",
+ "phyDriveTypeData": {"phyDriveType": "sas", "sataDriveAttributes": None}, "pfaReason": "None", "bypassSource": [],
+ "repairPolicy": {"removalData": {"removalMethod": "self", "rtrAttributes": {"hasReadyToRemoveIndicator": False, "readyToRemove": False}},
+ "replacementMethod": "self"}, "fdeCapable": True, "fdeEnabled": False, "fdeLocked": False,
+ "lockKeyID": "0000000000000000000000000000000000000000",
+ "ssdWearLife": {"averageEraseCountPercent": 18, "spareBlocksRemainingPercent": 91, "isWearLifeMonitoringSupported": True,
+ "percentEnduranceUsed": 18}, "driveMediaType": "ssd", "fpgaVersion": "",
+ "protectionInformationCapabilities": {"protectionInformationCapable": True, "protectionType": "type2Protection"},
+ "protectionInformationCapable": False, "protectionType": "type0Protection", "interposerPresent": False,
+ "interposerRef": "0000000000000000000000000000000000000000", "currentCommandAgingTimeout": 6, "defaultCommandAgingTimeout": 6,
+ "driveTemperature": {"currentTemp": 25, "refTemp": 64}, "blkSizePhysical": 4096, "lowestAlignedLBA": "0", "removed": False,
+ "locateInProgress": False, "fipsCapable": False, "firmwareVersion": "MSB6", "lockKeyIDValue": None,
+ "id": "0100000050000396AC882ED10000000000000000"},
+ {"offline": False, "hotSpare": False, "invalidDriveData": False, "available": True, "pfa": False,
+ "driveRef": "0100000050000396AC882EDD0000000000000000", "status": "optimal", "cause": "None",
+ "interfaceType": {"driveType": "sas", "fibre": None,
+ "sas": {"deviceName": "50000396AC882EDD",
+ "drivePortAddresses": [{"channel": 2, "portIdentifier": "50000396AC882EDF"},
+ {"channel": 1, "portIdentifier": "50000396AC882EDE"}]},
+ "scsi": None},
+ "physicalLocation": {"trayRef": "0E00000000000000000000000000000000000000", "slot": 8,
+ "locationParent": {"refType": "genericTyped", "controllerRef": None, "symbolRef": None,
+ "typedReference": {"componentType": "tray",
+ "symbolRef": "0E00000000000000000000000000000000000000"}},
+ "locationPosition": 8, "label": "7"}, "manufacturer": "TOSHIBA ",
+ "manufacturerDate": "1447200000", "productID": "PX04SVQ160 ", "serialNumber": "Y530A004T5MD", "softwareVersion": "MSB6", "blkSize": 512,
+ "usableCapacity": "1599784443904", "rawCapacity": "1600321314816", "worldWideName": "50000396AC882EDD0000000000000000",
+ "currentVolumeGroupRef": "0000000000000000000000000000000000000000", "sparedForDriveRef": "0000000000000000000000000000000000000000",
+ "mirrorDrive": "0000000000000000000000000000000000000000", "nonRedundantAccess": False, "workingChannel": -1, "volumeGroupIndex": -1,
+ "currentSpeed": "speed12gig", "maxSpeed": "speed12gig", "uncertified": False, "hasDegradedChannel": False, "degradedChannels": [],
+ "phyDriveType": "sas", "spindleSpeed": 0, "rtrAttributes": {"cruType": "dedicated", "parentCru": None,
+ "rtrAttributeData": {"hasReadyToRemoveIndicator": False,
+ "readyToRemove": False}}, "reserved": "",
+ "phyDriveTypeData": {"phyDriveType": "sas", "sataDriveAttributes": None}, "pfaReason": "None", "bypassSource": [],
+ "repairPolicy": {"removalData": {"removalMethod": "self", "rtrAttributes": {"hasReadyToRemoveIndicator": False, "readyToRemove": False}},
+ "replacementMethod": "self"}, "fdeCapable": True, "fdeEnabled": False, "fdeLocked": False,
+ "lockKeyID": "0000000000000000000000000000000000000000",
+ "ssdWearLife": {"averageEraseCountPercent": 18, "spareBlocksRemainingPercent": 91, "isWearLifeMonitoringSupported": True,
+ "percentEnduranceUsed": 18}, "driveMediaType": "ssd", "fpgaVersion": "",
+ "protectionInformationCapabilities": {"protectionInformationCapable": True, "protectionType": "type2Protection"},
+ "protectionInformationCapable": False, "protectionType": "type0Protection", "interposerPresent": False,
+ "interposerRef": "0000000000000000000000000000000000000000", "currentCommandAgingTimeout": 6, "defaultCommandAgingTimeout": 6,
+ "driveTemperature": {"currentTemp": 25, "refTemp": 64}, "blkSizePhysical": 4096, "lowestAlignedLBA": "0", "removed": False,
+ "locateInProgress": False, "fipsCapable": False, "firmwareVersion": "MSB6", "lockKeyIDValue": None,
+ "id": "0100000050000396AC882EDD0000000000000000"}],
+ "volumeGroup": [
+ {"sequenceNum": 1, "offline": False, "raidLevel": "raid6", "worldWideName": "600A098000A4B9D10000380A5D4AAC3C",
+ "volumeGroupRef": "04000000600A098000A4B9D10000380A5D4AAC3C", "reserved1": "000000000000000000000000", "reserved2": "",
+ "trayLossProtection": False, "label": "beegfs_storage_vg", "state": "complete", "spindleSpeedMatch": True, "spindleSpeed": 10500,
+ "isInaccessible": False, "securityType": "capable", "drawerLossProtection": False, "protectionInformationCapable": False,
+ "protectionInformationCapabilities": {"protectionInformationCapable": True, "protectionType": "type2Protection"},
+ "volumeGroupData": {"type": "unknown", "diskPoolData": None},
+ "usage": "standard", "driveBlockFormat": "allNative", "reservedSpaceAllocated": False, "securityLevel": "fde", "usedSpace": "1099511627776",
+ "totalRaidedSpace": "9597654597632",
+ "extents": [{"sectorOffset": "268435456", "rawCapacity": "8498142969856", "raidLevel": "raid6",
+ "volumeGroupRef": "04000000600A098000A4B9D10000380A5D4AAC3C", "freeExtentRef": "03000000600A098000A4B9D10000380A5D4AAC3C",
+ "reserved1": "000000000000000000000000", "reserved2": ""}],
+ "largestFreeExtentSize": "8498142969856", "raidStatus": "optimal", "freeSpace": "8498142969856", "drivePhysicalType": "sas",
+ "driveMediaType": "hdd", "normalizedSpindleSpeed": "spindleSpeed10k", "diskPool": False,
+ "id": "04000000600A098000A4B9D10000380A5D4AAC3C", "name": "beegfs_storage_vg"}], "volume": [
+ {"offline": False, "extremeProtection": False, "volumeHandle": 0, "raidLevel": "raid6", "sectorOffset": "0",
+ "worldWideName": "600A098000A4B28D00003E435D4AAC54", "label": "beegfs_storage_01_1", "blkSize": 512, "capacity": "1099511627776",
+ "reconPriority": 1, "segmentSize": 131072, "action": "None",
+ "cache": {"cwob": False, "enterpriseCacheDump": False, "mirrorActive": True, "mirrorEnable": True, "readCacheActive": False,
+ "readCacheEnable": False, "writeCacheActive": True, "writeCacheEnable": True, "cacheFlushModifier": "flush10Sec",
+ "readAheadMultiplier": 1}, "mediaScan": {"enable": True, "parityValidationEnable": True},
+ "volumeRef": "02000000600A098000A4B28D00003E435D4AAC54", "status": "optimal", "volumeGroupRef": "04000000600A098000A4B9D10000380A5D4AAC3C",
+ "currentManager": "070000000000000000000001", "preferredManager": "070000000000000000000001",
+ "perms": {"mapToLUN": True, "snapShot": True, "format": True, "reconfigure": True, "mirrorPrimary": True, "mirrorSecondary": True,
+ "copySource": True, "copyTarget": True, "readable": True, "writable": True, "rollback": True, "mirrorSync": True, "newImage": True,
+ "allowDVE": True, "allowDSS": True, "concatVolumeMember": False, "flashReadCache": True, "asyncMirrorPrimary": True,
+ "asyncMirrorSecondary": True, "pitGroup": True, "cacheParametersChangeable": True, "allowThinManualExpansion": False,
+ "allowThinGrowthParametersChange": False},
+ "mgmtClientAttribute": 0, "dssPreallocEnabled": False, "dssMaxSegmentSize": 0, "preReadRedundancyCheckEnabled": False,
+ "protectionInformationCapable": False, "protectionType": "type0Protection", "applicationTagOwned": True,
+ "repairedBlockCount": 0, "extendedUniqueIdentifier": "", "cacheMirroringValidateProtectionInformation": False,
+ "expectedProtectionInformationAppTag": 0, "volumeUse": "standardVolume", "volumeFull": False, "volumeCopyTarget": False, "volumeCopySource": False,
+ "pitBaseVolume": False, "asyncMirrorTarget": False, "asyncMirrorSource": False, "remoteMirrorSource": False, "remoteMirrorTarget": False,
+ "diskPool": False, "flashCached": False, "increasingBy": "0", "metadata": [], "dataAssurance": False, "objectType": "volume",
+ "listOfMappings": [
+ {"lunMappingRef": "88000000A1010000000000000000000000000000", "lun": 1, "ssid": 0, "perms": 15,
+ "volumeRef": "02000000600A098000A4B28D00003E435D4AAC54", "type": "host", "mapRef": "84000000600A098000A4B28D00303D065D430118",
+ "id": "88000000A1010000000000000000000000000000"}],
+ "mapped": True, "currentControllerId": "070000000000000000000001",
+ "cacheSettings": {"cwob": False, "enterpriseCacheDump": False, "mirrorActive": True, "mirrorEnable": True, "readCacheActive": False,
+ "readCacheEnable": False, "writeCacheActive": True, "writeCacheEnable": True, "cacheFlushModifier": "flush10Sec",
+ "readAheadMultiplier": 1},
+ "thinProvisioned": False, "preferredControllerId": "070000000000000000000001", "totalSizeInBytes": "1099511627776", "onlineVolumeCopy": False,
+ "wwn": "600A098000A4B28D00003E435D4AAC54", "name": "beegfs_storage_01_1", "id": "02000000600A098000A4B28D00003E435D4AAC54"}],
+ "storagePoolBundle": {"cluster": [], "host": [
+ {"hostRef": "84000000600A098000A4B28D00303D005D430107", "clusterRef": "0000000000000000000000000000000000000000", "label": "test",
+ "isSAControlled": False, "confirmLUNMappingCreation": False, "hostTypeIndex": 28, "protectionInformationCapableAccessMethod": True,
+ "isLargeBlockFormatHost": False, "isLun0Restricted": False, "ports": [],
+ "initiators": [
+ {"initiatorRef": "89000000600A098000A4B9D1003037005D4300F5",
+ "nodeName": {"ioInterfaceType": "iscsi", "iscsiNodeName": "iqn.iscsi_tests1", "remoteNodeWWN": None, "nvmeNodeName": None},
+ "alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "iscsi_test1",
+ "configuredAuthMethods": {"authMethodData": [{"authMethod": "None", "chapSecret": None}]},
+ "hostRef": "84000000600A098000A4B28D00303D005D430107", "initiatorInactive": False, "id": "89000000600A098000A4B9D1003037005D4300F5"}],
+ "hostSidePorts": [{"type": "iscsi", "address": "iqn.iscsi_tests1", "label": "iscsi_test1"}],
+ "id": "84000000600A098000A4B28D00303D005D430107", "name": "test"},
+ {"hostRef": "84000000600A098000A4B9D1003037035D4300F8", "clusterRef": "0000000000000000000000000000000000000000", "label": "test2",
+ "isSAControlled": True, "confirmLUNMappingCreation": False, "hostTypeIndex": 28, "protectionInformationCapableAccessMethod": True,
+ "isLargeBlockFormatHost": False, "isLun0Restricted": False, "ports": [],
+ "initiators": [
+ {"initiatorRef": "89000000600A098000A4B9D1003037075D4300F9",
+ "nodeName": {"ioInterfaceType": "iscsi", "iscsiNodeName": "iqn.iscsi_tests2", "remoteNodeWWN": None, "nvmeNodeName": None},
+ "alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "iscsi_test2",
+ "configuredAuthMethods": {"authMethodData": [{"authMethod": "None", "chapSecret": None}]},
+ "hostRef": "84000000600A098000A4B9D1003037035D4300F8", "initiatorInactive": False, "id": "89000000600A098000A4B9D1003037075D4300F9"}],
+ "hostSidePorts": [{"type": "iscsi", "address": "iqn.iscsi_tests2", "label": "iscsi_test2"}],
+ "id": "84000000600A098000A4B9D1003037035D4300F8", "name": "test2"},
+ {"hostRef": "84000000600A098000A4B28D00303D065D430118", "clusterRef": "0000000000000000000000000000000000000000", "label": "beegfs_storage1",
+ "isSAControlled": False, "confirmLUNMappingCreation": False, "hostTypeIndex": 28, "protectionInformationCapableAccessMethod": True,
+ "isLargeBlockFormatHost": False, "isLun0Restricted": False, "ports": [],
+ "initiators": [
+ {"initiatorRef": "89000000600A098000A4B28D00303CF55D4300E3",
+ "nodeName": {"ioInterfaceType": "iscsi", "iscsiNodeName": "iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818", "remoteNodeWWN": None,
+ "nvmeNodeName": None}, "alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "beegfs_storage1_iscsi_0",
+ "configuredAuthMethods": {"authMethodData": [{"authMethod": "None", "chapSecret": None}]},
+ "hostRef": "84000000600A098000A4B28D00303D065D430118", "initiatorInactive": False, "id": "89000000600A098000A4B28D00303CF55D4300E3"}],
+ "hostSidePorts": [{"type": "iscsi", "address": "iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818", "label": "beegfs_storage1_iscsi_0"}],
+ "id": "84000000600A098000A4B28D00303D065D430118", "name": "beegfs_storage1"},
+ {"hostRef": "84000000600A098000A4B9D10030370B5D430109", "clusterRef": "0000000000000000000000000000000000000000", "label": "beegfs_metadata1",
+ "isSAControlled": False, "confirmLUNMappingCreation": False, "hostTypeIndex": 28, "protectionInformationCapableAccessMethod": True,
+ "isLargeBlockFormatHost": False, "isLun0Restricted": False, "ports": [],
+ "initiators": [
+ {"initiatorRef": "89000000600A098000A4B28D00303CFC5D4300F7",
+ "nodeName": {"ioInterfaceType": "iscsi", "iscsiNodeName": "iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8", "remoteNodeWWN": None,
+ "nvmeNodeName": None}, "alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "beegfs_metadata1_iscsi_0",
+ "configuredAuthMethods": {"authMethodData": [{"authMethod": "None", "chapSecret": None}]},
+ "hostRef": "84000000600A098000A4B9D10030370B5D430109", "initiatorInactive": False, "id": "89000000600A098000A4B28D00303CFC5D4300F7"}],
+ "hostSidePorts": [{"type": "iscsi", "address": "iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8", "label": "beegfs_metadata1_iscsi_0"}],
+ "id": "84000000600A098000A4B9D10030370B5D430109", "name": "beegfs_metadata1"}], "lunMapping": [
+ {"lunMappingRef": "8800000000000000000000000000000000000000", "lun": 7, "ssid": 16384, "perms": 15,
+ "volumeRef": "21000000600A098000A4B28D000027EC5CF10481", "type": "all", "mapRef": "0000000000000000000000000000000000000000",
+ "id": "8800000000000000000000000000000000000000"},
+ {"lunMappingRef": "880000008B010000000000000000000000000000", "lun": 7, "ssid": 16384, "perms": 15,
+ "volumeRef": "21000000600A098000A4B28D000027EC5CF10481", "type": "host", "mapRef": "84000000600A098000A4B28D00303D065D430118",
+ "id": "880000008B010000000000000000000000000000"},
+ {"lunMappingRef": "8800000090010000000000000000000000000000", "lun": 7, "ssid": 16384, "perms": 15,
+ "volumeRef": "21000000600A098000A4B28D000027EC5CF10481", "type": "host", "mapRef": "84000000600A098000A4B9D10030370B5D430109",
+ "id": "8800000090010000000000000000000000000000"},
+ {"lunMappingRef": "8800000092010000000000000000000000000000", "lun": 7, "ssid": 16384, "perms": 15,
+ "volumeRef": "21000000600A098000A4B28D000027EC5CF10481", "type": "host", "mapRef": "84000000600A098000A4B28D00303D005D430107",
+ "id": "8800000092010000000000000000000000000000"}, {"lunMappingRef": "88000000A1010000000000000000000000000000", "lun": 1, "ssid": 0, "perms": 15,
+ "volumeRef": "02000000600A098000A4B28D00003E435D4AAC54", "type": "host",
+ "mapRef": "84000000600A098000A4B28D00303D065D430118",
+ "id": "88000000A1010000000000000000000000000000"}]}, "highLevelVolBundle": {"pit": []}}
+
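+ # Facts dictionary expected from get_array_facts() given the GRAPH_RESPONSE and WORKLOAD_RESPONSE fixtures above.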
+ EXPECTED_GET_ARRAY_FACTS = {'facts_from_proxy': False,
+ 'netapp_controllers': [{'name': 'A', 'serial': '021619039162', 'status': 'optimal'}],
+ 'netapp_default_hostgroup_access_volume_lun': 7,
+ 'netapp_disks': [
+ {'available': True, 'firmware_version': 'MSB6', 'id': '0100000050000396AC882ED10000000000000000', 'media_type': 'ssd',
+ 'product_id': 'PX04SVQ160 ', 'serial_number': 'Y530A001T5MD', 'status': 'optimal',
+ 'tray_ref': '0E00000000000000000000000000000000000000', 'usable_bytes': '1599784443904'},
+ {'available': True, 'firmware_version': 'MSB6', 'id': '0100000050000396AC882EDD0000000000000000', 'media_type': 'ssd',
+ 'product_id': 'PX04SVQ160 ', 'serial_number': 'Y530A004T5MD', 'status': 'optimal',
+ 'tray_ref': '0E00000000000000000000000000000000000000', 'usable_bytes': '1599784443904'}],
+ 'netapp_driveside_interfaces': [{'controller': 'A', 'interface_speed': '12g', 'interface_type': 'sas'}],
+ 'netapp_enabled_features': ['autoCodeSync', 'autoLunTransfer', 'bundleMigration', 'driveSlotLimit', 'flashReadCache',
+ 'mixedDriveTypes', 'performanceTier', 'protectionInformation', 'raid6', 'secureVolume',
+ 'ssdSupport', 'stagedDownload', 'storagePoolsType2', 'subLunsAllowed',
+ 'totalNumberOfArvmMirrorsPerArray', 'totalNumberOfPitsPerArray',
+ 'totalNumberOfThinVolumesPerArray'],
+ 'netapp_host_groups': [{'hosts': ['test',
+ 'test2',
+ 'beegfs_storage1',
+ 'beegfs_metadata1'],
+ 'id': '0000000000000000000000000000000000000000',
+ 'name': 'default_hostgroup'}],
+ 'netapp_host_types': [{'index': 0, 'type': 'FactoryDefault'}, {'index': 1, 'type': 'W2KNETNCL'},
+ {'index': 27, 'type': 'LnxTPGSALUA_SF'}, {'index': 28, 'type': 'LnxDHALUA'}],
+ 'netapp_hosts': [
+ {'group_id': '0000000000000000000000000000000000000000', 'host_type_index': 28,
+ 'hosts_reference': '84000000600A098000A4B28D00303D005D430107',
+ 'id': '84000000600A098000A4B28D00303D005D430107', 'name': 'test',
+ 'ports': [{'address': 'iqn.iscsi_tests1', 'label': 'iscsi_test1', 'type': 'iscsi'}]},
+ {'group_id': '0000000000000000000000000000000000000000', 'host_type_index': 28,
+ 'hosts_reference': '84000000600A098000A4B9D1003037035D4300F8',
+ 'id': '84000000600A098000A4B9D1003037035D4300F8', 'name': 'test2',
+ 'ports': [{'address': 'iqn.iscsi_tests2', 'label': 'iscsi_test2', 'type': 'iscsi'}]},
+ {'group_id': '0000000000000000000000000000000000000000', 'host_type_index': 28,
+ 'hosts_reference': '84000000600A098000A4B28D00303D065D430118',
+ 'id': '84000000600A098000A4B28D00303D065D430118', 'name': 'beegfs_storage1',
+ 'ports': [{'address': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818', 'label': 'beegfs_storage1_iscsi_0',
+ 'type': 'iscsi'}]},
+ {'group_id': '0000000000000000000000000000000000000000', 'host_type_index': 28,
+ 'hosts_reference': '84000000600A098000A4B9D10030370B5D430109',
+ 'id': '84000000600A098000A4B9D10030370B5D430109', 'name': 'beegfs_metadata1',
+ 'ports': [{'address': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8', 'label': 'beegfs_metadata1_iscsi_0',
+ 'type': 'iscsi'}]}],
+ 'netapp_hostside_interfaces': [{'fc': [], 'ib': [],
+ 'iscsi': [
+ {'controller': 'A', 'current_interface_speed': '10g', 'ipv4_address': '10.10.11.110',
+ 'ipv4_enabled': True,
+ 'ipv4_gateway': '0.0.0.0', 'ipv4_subnet_mask': '255.255.255.0', 'ipv6_enabled': True,
+ 'iqn': 'iqn.1992-08.com.netapp:2806.600a098000a4b28d000000005cf10481', 'link_status': 'up',
+ 'mtu': 9000,
+ 'supported_interface_speeds': ['1g', '10g']}], 'sas': []}],
+ 'netapp_luns_by_target': {'beegfs_metadata1': [],
+ 'beegfs_storage1': [('beegfs_storage_01_1', 1)],
+ 'default_hostgroup': [('beegfs_storage_01_1', 1)],
+ 'test': [],
+ 'test2': []},
+ 'netapp_management_interfaces': [
+ {'alias': 'ictm0718s01c1-a', 'channel': 1, 'controller': 'A', 'dns_config_method': 'stat',
+ 'dns_servers': [{'addressType': 'ipv4', 'ipv4Address': '10.193.0.250', 'ipv6Address': None},
+ {'addressType': 'ipv4', 'ipv4Address': '10.192.0.250', 'ipv6Address': None}],
+ 'ipv4_address': '10.113.1.192',
+ 'ipv4_address_config_method': 'static', 'ipv4_enabled': True, 'ipv4_gateway': '10.113.1.1',
+ 'ipv4_subnet_mask': '255.255.255.0', 'ipv6_enabled': False, 'link_status': 'up',
+ 'mac_address': '00A098A4B28D', 'name': 'wan0', 'ntp_config_method': 'stat',
+ 'ntp_servers': [
+ {'addrType': 'ipvx', 'domainName': None,
+ 'ipvxAddress': {'addressType': 'ipv4', 'ipv4Address': '216.239.35.0', 'ipv6Address': None}},
+ {'addrType': 'ipvx', 'domainName': None,
+ 'ipvxAddress': {'addressType': 'ipv4', 'ipv4Address': '216.239.35.4', 'ipv6Address': None}}],
+ 'remote_ssh_access': True}],
+ 'netapp_storage_array': {'cache_block_sizes': [4096, 8192, 16384, 32768], 'chassis_serial': '021633035190',
+ 'firmware': '08.42.30.05', 'name': 'ictm0718s01c1',
+ 'segment_sizes': [32768, 65536, 131072, 262144, 524288, 495616, 655360, 1982464],
+ 'wwn': '600A098000A4B28D000000005CF10481'},
+ 'netapp_storage_pools': [
+ {'available_capacity': '8498142969856', 'id': '04000000600A098000A4B9D10000380A5D4AAC3C', 'name': 'beegfs_storage_vg',
+ 'total_capacity': '9597654597632', 'used_capacity': '1099511627776'}],
+ 'netapp_volumes': [
+ {'capacity': '1099511627776', 'id': '02000000600A098000A4B28D00003E435D4AAC54', 'is_thin_provisioned': False,
+ 'name': 'beegfs_storage_01_1', 'parent_storage_pool_id': '04000000600A098000A4B9D10000380A5D4AAC3C', 'workload': []}],
+ 'netapp_volumes_by_initiators': {'beegfs_metadata1': [],
+ 'beegfs_storage1': [{'id': '02000000600A098000A4B28D00003E435D4AAC54',
+ 'meta_data': {},
+ 'name': 'beegfs_storage_01_1',
+ 'raid_level': 'raid6',
+ 'segment_size_kb': 128,
+ 'stripe_count': -2,
+ 'workload_name': '',
+ 'wwn': '600A098000A4B28D00003E435D4AAC54'}],
+
+ 'test': [], 'test2': []},
+ 'netapp_workload_tags': [
+ {'attributes': [{'key': 'profileId', 'value': 'ansible_workload_1'}], 'id': '4200000001000000000000000000000000000000',
+ 'name': 'beegfs_metadata'},
+ {'attributes': [{'key': 'profileId', 'value': 'Other_1'}], 'id': '4200000002000000000000000000000000000000',
+ 'name': 'other_workload_1'}], 'snapshot_images': [], 'ssid': '1'}
+
+ def _set_args(self, **kwargs):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if kwargs is not None:
+ module_args.update(kwargs)
+ set_module_args(module_args)
+
+ def test_get_controllers_pass(self):
+ """Verify get_controllers returns the expected results."""
+ self._set_args()
+ facts = Facts()
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, ["070000000000000000000002", "070000000000000000000001"])):
+ self.assertEqual(facts.get_controllers(), {"070000000000000000000001": "A", "070000000000000000000002": "B"})
+
+ def test_get_controllers_fail(self):
+ """Verify get_controllers throws the expected exceptions."""
+ self._set_args()
+ facts = Facts()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve controller list!"):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ facts.get_controllers()
+
+ def test_get_array_facts_pass(self):
+ """Verify get_array_facts method returns expected results."""
+ self.maxDiff = None
+ self._set_args()
+ facts = Facts()
+ facts.is_embedded = lambda: True
+ with mock.patch(self.GET_CONTROLLERS_FUNC, return_value={"070000000000000000000001": "A", "070000000000000000000002": "B"}):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, self.GRAPH_RESPONSE), (200, self.WORKLOAD_RESPONSE)]):
+ self.assertEqual(facts.get_array_facts(), self.EXPECTED_GET_ARRAY_FACTS)
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_firmware.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_firmware.py
new file mode 100644
index 000000000..8c786d63b
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_firmware.py
@@ -0,0 +1,494 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible.module_utils import six
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_firmware import NetAppESeriesFirmware
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat.mock import patch, mock_open
+
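+# The firmware module reads local files with the built-in open(); the patch target for open() differs between Python 2 and 3.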
+if six.PY2:
+ builtin_path = "__builtin__.open"
+else:
+ builtin_path = "builtins.open"
+
+
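+# mock_open's file handle does not reliably support iteration on the older Python releases this collection
+# still targets, so __iter__/__next__ are wired up manually for the PY2 and PY3 cases below.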
+def mock_open_with_iter(*args, **kwargs):
+ mock = mock_open(*args, **kwargs)
+
+ if six.PY2:
+ mock.return_value.__iter__ = lambda x: iter(x.readline, "")
+ else:
+ mock.return_value.__iter__ = lambda x: x
+ mock.return_value.__next__ = lambda x: iter(x.readline, "")
+ return mock
+
+
+class FirmwareTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"api_username": "username",
+ "api_password": "password",
+ "api_url": "http://localhost/devmgr/v2",
+ "ssid": "1",
+ "validate_certs": "no"}
+ REQUEST_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_firmware.NetAppESeriesFirmware.request"
+ BASE_REQUEST_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_firmware.request"
+ CREATE_MULTIPART_FORMDATA_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_firmware.create_multipart_formdata"
+ SLEEP_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_firmware.sleep"
+ BUNDLE_HEADER = b'combined_content\x00\x00\x00\x04\x00\x00\x07\xf8#Engenio Downloadable Package\n#Tue Jun 04 11:46:48 CDT 2019\ncheckList=compatibleBoard' \
+ b'Map,compatibleSubmodelMap,compatibleFirmwareMap,fileManifest\ncompatibleSubmodelMap=261|true,262|true,263|true,264|true,276|true,277|t' \
+ b'rue,278|true,282|true,300|true,301|true,302|true,318|true,319|true,320|true,321|true,322|true,323|true,324|true,325|true,326|true,328|t' \
+ b'rue,329|true,330|true,331|true,332|true,333|true,338|true,339|true,340|true,341|true,342|true,343|true,344|true,345|true,346|true,347|t' \
+ b'rue,356|true,357|true,390|true\nnonDisplayableAttributeList=512\ndisplayableAttributeList=FILENAME|RCB_11.40.5_280x_5ceef00e.dlp,VERSI' \
+ b'ON|11.40.5\ndacStoreLimit=512\nfileManifest=metadata.tar|metadata|08.42.50.00.000|c04275f98fc2f07bd63126fc57cb0569|bundle|10240,084250' \
+ b'00_m3_e30_842_root.img|linux|08.42.50.00|367c5216e5c4b15b904a025bff69f039|linux|1342177280,RC_08425000_m3_e30_842_280x.img|linux_cfw|0' \
+ b'8.42.50.00|e6589b0a50b29ff34b34d3ced8ae3ccb|eos|1073741824,msw.img|sam|11.42.0000.0028|ef3ee5589ab4a019a3e6f83768364aa1|linux|41943040' \
+ b'0,iom.img|iom|11.42.0G00.0003|9bb740f8d3a4e62a0f2da2ec83c254c4|linux|8177664\nmanagementVersionList=devmgr.v1142api8.Manager\ncompatib' \
+ b'leFirmwareMap=08.30.*.*|true,08.30.*.30|false,08.30.*.31|false,08.30.*.32|false,08.30.*.33|false,08.30.*.34|false,08.30.*.35|false,08.' \
+ b'30.*.36|false,08.30.*.37|false,08.30.*.38|false,08.30.*.39|false,08.40.*.*|true,08.40.*.30|false,08.40.*.31|false,08.40.*.32|false,08.4' \
+ b'0.*.33|false,08.40.*.34|false,08.40.*.35|false,08.40.*.36|false,08.40.*.37|false,08.40.*.38|false,08.40.*.39|false,08.41.*.*|true,08.4' \
+ b'1.*.30|false,08.41.*.31|false,08.41.*.32|false,08.41.*.33|false,08.41.*.34|false,08.41.*.35|false,08.41.*.36|false,08.41.*.37|false,08' \
+ b'.41.*.38|false,08.41.*.39|false,08.42.*.*|true,08.42.*.30|false,08.42.*.31|false,08.42.*.32|false,08.42.*.33|false,08.42.*.34|false,08' \
+ b'.42.*.35|false,08.42.*.36|false,08.42.*.37|false,08.42.*.38|false,08.42.*.39|false\nversion=08.42.50.00.000\ntype=tar\nversionTag=comb' \
+ b'ined_content\n'
+
+ NVSRAM_HEADER = b'nvsram \x00\x00\x00\x01\x00\x00\x00\xa0\x00\x00\x00\x04280X\x00\x00\x00\x00\x00\x00\x00\x032801 2804 2806 \x00\x00' \
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x1bArapaho controller, 8.52 FW\x00\x00\x001dual controller configuration, with cac' \
+ b'he battery\x07\x81A\x08Config\x00\x00\x0008.52.00.00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\xdc\xaf\x00\x00' \
+ b'\x94\xc1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\x00\x00\x00\x00 2801 2804 2806 \x00\x00\x00\x00\x00' \
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' \
+ b'\x00\x00\x00\x00\x00\x00Board\n .Board Name = "NetApp RAID Controller"\n .NVSRAM Configuration Number' \
+ b' = "N280X-852834-D02"\n\nUserCfg\n .Enable Synchronous Negotiation = 0x00 \n'
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_is_firmware_bundled_pass(self):
+ """Determine whether firmware file is bundled."""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
+ with patch(builtin_path, mock_open(read_data=b"firmwarexxxxxxxx")) as mock_file:
+ firmware = NetAppESeriesFirmware()
+ self.assertEqual(firmware.is_firmware_bundled(), False)
+
+ self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
+ with patch(builtin_path, mock_open(read_data=self.BUNDLE_HEADER[:16])) as mock_file:
+ firmware = NetAppESeriesFirmware()
+ self.assertEqual(firmware.is_firmware_bundled(), True)
+
+ def test_is_firmware_bundled_fail(self):
+ """Verify is_firmware_bundled fails for a file that is not firmware."""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
+ with patch(builtin_path, mock_open(read_data=b"xxxxxxxxxxxxxxxx")) as mock_file:
+ firmware = NetAppESeriesFirmware()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Firmware file is invalid."):
+ firmware.is_firmware_bundled()
+
+ def test_firmware_version(self):
+ """Verify correct firmware version is returned."""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
+ firmware = NetAppESeriesFirmware()
+ firmware.is_firmware_bundled = lambda: True
+ with patch(builtin_path, mock_open_with_iter(read_data=self.BUNDLE_HEADER)) as mock_file:
+ self.assertEqual(firmware.firmware_version(), b"11.40.5")
+
+ def test_nvsram_version(self):
+ """Verify correct nvsram version is returned."""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
+ firmware = NetAppESeriesFirmware()
+
+ with patch(builtin_path, mock_open_with_iter(read_data=self.NVSRAM_HEADER)) as mock_file:
+ self.assertEqual(firmware.nvsram_version(), b"N280X-852834-D02")
+
+ def test_check_system_health_pass(self):
+ """Validate check_system_health method."""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
+ firmware = NetAppESeriesFirmware()
+ with patch(self.REQUEST_FUNC, return_value=(200, {"successful": True})):
+ firmware.check_system_health()
+
+ def test_check_system_health_fail(self):
+ """Validate check_system_health method throws proper exceptions."""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
+ firmware = NetAppESeriesFirmware()
+ with patch(self.SLEEP_FUNC, return_value=None):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Health check failed!"):
+ with patch(self.REQUEST_FUNC, return_value=(404, Exception())):
+ firmware.check_system_health()
+
+ def test_embedded_check_nvsram_compatibility_pass(self):
+ """Verify embedded nvsram compatibility."""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
+ firmware = NetAppESeriesFirmware()
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
+ with patch(self.REQUEST_FUNC, return_value=(200, {"signatureTestingPassed": True,
+ "fileCompatible": True,
+ "versionContents": [{"module": "nvsram",
+ "bundledVersion": "N280X-842834-D02",
+ "onboardVersion": "N280X-842834-D02"}]})):
+ firmware.embedded_check_nvsram_compatibility()
+
+ def test_embedded_check_nvsram_compatibility_fail(self):
+ """Verify embedded nvsram compatibility fails with expected exceptions."""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
+ firmware = NetAppESeriesFirmware()
+
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve NVSRAM compatibility results."):
+ with patch(self.REQUEST_FUNC, return_value=Exception()):
+ firmware.embedded_check_nvsram_compatibility()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Invalid NVSRAM file."):
+ with patch(self.REQUEST_FUNC, return_value=(200, {"signatureTestingPassed": False,
+ "fileCompatible": False,
+ "versionContents": [{"module": "nvsram",
+ "bundledVersion": "N280X-842834-D02",
+ "onboardVersion": "N280X-842834-D02"}]})):
+ firmware.embedded_check_nvsram_compatibility()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Incompatible NVSRAM file."):
+ with patch(self.REQUEST_FUNC, return_value=(200, {"signatureTestingPassed": True,
+ "fileCompatible": False,
+ "versionContents": [{"module": "nvsram",
+ "bundledVersion": "N280X-842834-D02",
+ "onboardVersion": "N280X-842834-D02"}]})):
+ firmware.embedded_check_nvsram_compatibility()
+
+ def test_embedded_check_firmware_compatibility_pass(self):
+ """Verify embedded firmware compatibility."""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
+ firmware = NetAppESeriesFirmware()
+
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
+ with patch(self.REQUEST_FUNC, return_value=(200, {
+ "signatureTestingPassed": True,
+ "fileCompatible": True,
+ "versionContents": [
+ {"module": "bundle", "bundledVersion": "08.42.50.00.000", "onboardVersion": "08.42.30.05"},
+ {"module": "bundleDisplay", "bundledVersion": "11.40.5", "onboardVersion": "11.40.3R2"},
+ {"module": "hypervisor", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
+ {"module": "raid", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
+ {"module": "management", "bundledVersion": "11.42.0000.0028", "onboardVersion": "11.42.0000.0026"},
+ {"module": "iom", "bundledVersion": "11.42.0G00.0003", "onboardVersion": "11.42.0G00.0001"}]})):
+ firmware.embedded_check_bundle_compatibility()
+
+ def test_embedded_check_firmware_compatibility_fail(self):
+ """Verify embedded firmware compatibility fails with expected exceptions."""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
+ firmware = NetAppESeriesFirmware()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve bundle compatibility results."):
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
+ with patch(self.REQUEST_FUNC, return_value=Exception()):
+ firmware.embedded_check_bundle_compatibility()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Invalid firmware bundle file."):
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
+ with patch(self.REQUEST_FUNC, return_value=(200, {
+ "signatureTestingPassed": False,
+ "fileCompatible": True,
+ "versionContents": [
+ {"module": "bundle", "bundledVersion": "08.42.50.00.000", "onboardVersion": "08.42.30.05"},
+ {"module": "bundleDisplay", "bundledVersion": "11.40.5", "onboardVersion": "11.40.3R2"},
+ {"module": "hypervisor", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
+ {"module": "raid", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
+ {"module": "management", "bundledVersion": "11.42.0000.0028", "onboardVersion": "11.42.0000.0026"},
+ {"module": "iom", "bundledVersion": "11.42.0G00.0003", "onboardVersion": "11.42.0G00.0001"}]})):
+ firmware.embedded_check_bundle_compatibility()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Incompatible firmware bundle file."):
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
+ with patch(self.REQUEST_FUNC, return_value=(200, {
+ "signatureTestingPassed": True,
+ "fileCompatible": False,
+ "versionContents": [
+ {"module": "bundle", "bundledVersion": "08.42.50.00.000", "onboardVersion": "08.42.30.05"},
+ {"module": "bundleDisplay", "bundledVersion": "11.40.5", "onboardVersion": "11.40.3R2"},
+ {"module": "hypervisor", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
+ {"module": "raid", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
+ {"module": "management", "bundledVersion": "11.42.0000.0028", "onboardVersion": "11.42.0000.0026"},
+ {"module": "iom", "bundledVersion": "11.42.0G00.0003", "onboardVersion": "11.42.0G00.0001"}]})):
+ firmware.embedded_check_bundle_compatibility()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Downgrades are not permitted."):
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
+ with patch(self.REQUEST_FUNC, return_value=(200, {
+ "signatureTestingPassed": True,
+ "fileCompatible": True,
+ "versionContents": [
+ {"module": "bundle", "bundledVersion": "08.42.00.00.000", "onboardVersion": "08.50.30.05"},
+ {"module": "bundleDisplay", "bundledVersion": "11.40.5", "onboardVersion": "11.40.3R2"},
+ {"module": "hypervisor", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
+ {"module": "raid", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
+ {"module": "management", "bundledVersion": "11.42.0000.0028", "onboardVersion": "11.42.0000.0026"},
+ {"module": "iom", "bundledVersion": "11.42.0G00.0003", "onboardVersion": "11.42.0G00.0001"}]})):
+ firmware.embedded_check_bundle_compatibility()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Downgrades are not permitted."):
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
+ with patch(self.REQUEST_FUNC, return_value=(200, {
+ "signatureTestingPassed": True,
+ "fileCompatible": True,
+ "versionContents": [
+ {"module": "bundle", "bundledVersion": "08.42.00.00.000", "onboardVersion": "09.20.30.05"},
+ {"module": "bundleDisplay", "bundledVersion": "11.40.5", "onboardVersion": "11.40.3R2"},
+ {"module": "hypervisor", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
+ {"module": "raid", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
+ {"module": "management", "bundledVersion": "11.42.0000.0028", "onboardVersion": "11.42.0000.0026"},
+ {"module": "iom", "bundledVersion": "11.42.0G00.0003", "onboardVersion": "11.42.0G00.0001"}]})):
+ firmware.embedded_check_bundle_compatibility()
+
+ def test_wait_for_web_services_pass(self):
+ """Verify controller reboot wait succeeds."""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
+ firmware = NetAppESeriesFirmware()
+ firmware.firmware_version = lambda: b"08.42.30.05"
+ firmware.nvsram_version = lambda: b"N280X-842834-D02"
+ firmware.is_firmware_bundled = lambda: False
+ with patch(self.SLEEP_FUNC, return_value=None):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.42.30.05"]), (200, ["N280X-842834-D02"]), (200, {"status": "optimal"})]):
+ firmware.wait_for_web_services()
+
+ def test_wait_for_web_services_fail(self):
+ """Verify controller reboot wait throws expected exceptions"""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
+ firmware = NetAppESeriesFirmware()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Timeout waiting for Santricity Web Services."):
+ with patch(self.SLEEP_FUNC, return_value=None):
+ with patch(self.BASE_REQUEST_FUNC, return_value=Exception()):
+ firmware.wait_for_web_services()
+
+ def test_check_nvsram_compatibility_pass(self):
+ """Verify proxy nvsram compatibility."""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+ with patch(self.SLEEP_FUNC, return_value=None):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": 1}),
+ (200, {"checkRunning": True}),
+ (200, {"checkRunning": False,
+ "results": [{"nvsramFiles": [{"filename": "test_nvsram.dlp"}]}]})]):
+ firmware.proxy_check_nvsram_compatibility()
+
+ def test_check_nvsram_compatibility_fail(self):
+ """Verify proxy nvsram compatibility throws expected exceptions."""
+ self._set_args({"firmware": "test.dlp", "nvsram": "test_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+ with patch(self.SLEEP_FUNC, return_value=None):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to receive NVSRAM compatibility information."):
+ with patch(self.REQUEST_FUNC, return_value=Exception()):
+ firmware.proxy_check_nvsram_compatibility()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve NVSRAM status update from proxy."):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": 1}), Exception()]):
+ firmware.proxy_check_nvsram_compatibility()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "NVSRAM is not compatible."):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": 1}),
+ (200, {"checkRunning": True}),
+ (200, {"checkRunning": False,
+ "results": [{"nvsramFiles": [{"filename": "not_test_nvsram.dlp"}]}]})]):
+ firmware.proxy_check_nvsram_compatibility()
+
+ def test_proxy_check_firmware_compatibility_pass(self):
+ """Verify proxy firmware compatibility."""
+ self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+ with patch(self.SLEEP_FUNC, return_value=None):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": 1}),
+ (200, {"checkRunning": True}),
+ (200, {"checkRunning": False,
+ "results": [{"cfwFiles": [{"filename": "test_firmware.dlp"}]}]})]):
+ firmware.proxy_check_firmware_compatibility()
+
+ def test_proxy_check_firmware_compatibility_fail(self):
+ """Verify proxy firmware compatibility throws expected exceptions."""
+ self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+
+ with patch(self.SLEEP_FUNC, return_value=None):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to receive firmware compatibility information."):
+ with patch(self.REQUEST_FUNC, return_value=Exception()):
+ firmware.proxy_check_firmware_compatibility()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve firmware status update from proxy."):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": 1}), Exception()]):
+ firmware.proxy_check_firmware_compatibility(retries=0)
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Firmware bundle is not compatible."):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": 1}),
+ (200, {"checkRunning": True}),
+ (200, {"checkRunning": False, "results": [{"cfwFiles": [{"filename": "not_test_firmware.dlp"}]}]})]):
+ firmware.proxy_check_firmware_compatibility(retries=0)
+
+ def test_proxy_upload_and_check_compatibility_pass(self):
+ """Verify proxy_upload_and_check_compatibility"""
+ self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+ firmware.proxy_check_nvsram_compatibility = lambda: None
+ firmware.proxy_check_firmware_compatibility = lambda: None
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("headers", "data")):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, [{"version": "XX.XX.XX.XX", "filename": "test"},
+ {"version": "XXXXXXXXXX", "filename": "test.dlp"}]),
+ (200, None), (200, None)]):
+ firmware.proxy_upload_and_check_compatibility()
+
+ with patch(self.REQUEST_FUNC, side_effect=[(200, [{"version": "XX.XX.XX.XX", "filename": "test"},
+ {"version": "test_nvsram", "filename": "test_nvsram.dlp"},
+ {"version": "test", "filename": "test.dlp"},
+ {"filename": "test_firmware.dlp", "version": "test_firmware"}]),
+ (200, None), (200, None)]):
+ firmware.proxy_upload_and_check_compatibility()
+
+ def test_proxy_upload_and_check_compatibility_fail(self):
+ """Verify proxy_upload_and_check_compatibility throws expected exceptions."""
+ self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+ firmware.proxy_check_nvsram_compatibility = lambda: None
+ firmware.proxy_check_firmware_compatibility = lambda: None
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve existing firmware files."):
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("headers", "data")):
+ with patch(self.REQUEST_FUNC, return_value=Exception()):
+ firmware.proxy_upload_and_check_compatibility()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to upload NVSRAM file."):
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("headers", "data")):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, [{"version": "XX.XX.XX.XX", "filename": "test"},
+ {"version": "XXXXXXXXXX", "filename": "test.dlp"},
+ {"filename": "test_firmware.dlp", "version": "test_firmware"}]),
+ Exception()]):
+ firmware.proxy_upload_and_check_compatibility()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to upload firmware bundle file."):
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("headers", "data")):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, [{"version": "XX.XX.XX.XX", "filename": "test"},
+ {"version": "test_nvsram", "filename": "test_nvsram.dlp"},
+ {"version": "XXXXXXXXXX", "filename": "test.dlp"}]),
+ Exception()]):
+ firmware.proxy_upload_and_check_compatibility()
+
+ def test_proxy_check_upgrade_required_pass(self):
+ """Verify proxy_check_upgrade_required."""
+ self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+ firmware.firmware_version = lambda: b"08.42.50.00"
+ firmware.nvsram_version = lambda: b"nvsram_version"
+ with patch(self.REQUEST_FUNC, side_effect=[(200, [{"versionString": "08.42.50.00"}]), (200, ["nvsram_version"])]):
+ firmware.is_firmware_bundled = lambda: True
+ firmware.proxy_check_upgrade_required()
+ self.assertFalse(firmware.upgrade_required)
+
+ with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.42.50.00"]), (200, ["nvsram_version"])]):
+ firmware.is_firmware_bundled = lambda: False
+ firmware.proxy_check_upgrade_required()
+ self.assertFalse(firmware.upgrade_required)
+
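+ # Case 2: firmware matches but the NVSRAM file differs -> upgrade required.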
+ self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+ firmware.firmware_version = lambda: b"08.42.50.00"
+ firmware.nvsram_version = lambda: b"not_nvsram_version"
+ with patch(self.REQUEST_FUNC, side_effect=[(200, [{"versionString": "08.42.50.00"}]), (200, ["nvsram_version"])]):
+ firmware.is_firmware_bundled = lambda: True
+ firmware.proxy_check_upgrade_required()
+ self.assertTrue(firmware.upgrade_required)
+
+ with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.42.50.00"]), (200, ["nvsram_version"])]):
+ firmware.is_firmware_bundled = lambda: False
+ firmware.proxy_check_upgrade_required()
+ self.assertTrue(firmware.upgrade_required)
+
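+ # Case 3: firmware file is newer than the installed version -> upgrade required.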
+ self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+ firmware.firmware_version = lambda: b"08.52.00.00"
+ firmware.nvsram_version = lambda: b"nvsram_version"
+ with patch(self.REQUEST_FUNC, side_effect=[(200, [{"versionString": "08.42.50.00"}]), (200, ["nvsram_version"])]):
+ firmware.is_firmware_bundled = lambda: True
+ firmware.proxy_check_upgrade_required()
+ self.assertTrue(firmware.upgrade_required)
+
+ with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.42.50.00"]), (200, ["nvsram_version"])]):
+ firmware.is_firmware_bundled = lambda: False
+ firmware.proxy_check_upgrade_required()
+ self.assertTrue(firmware.upgrade_required)
+
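+ # Case 4: both the firmware and NVSRAM files differ from what is installed -> upgrade required.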
+ self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+ firmware.firmware_version = lambda: b"08.52.00.00"
+ firmware.nvsram_version = lambda: b"not_nvsram_version"
+ with patch(self.REQUEST_FUNC, side_effect=[(200, [{"versionString": "08.42.50.00"}]), (200, ["nvsram_version"])]):
+ firmware.is_firmware_bundled = lambda: True
+ firmware.proxy_check_upgrade_required()
+ self.assertTrue(firmware.upgrade_required)
+
+ with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.42.50.00"]), (200, ["nvsram_version"])]):
+ firmware.is_firmware_bundled = lambda: False
+ firmware.proxy_check_upgrade_required()
+ self.assertTrue(firmware.upgrade_required)
+
+ def test_proxy_check_upgrade_required_fail(self):
+ """Verify proxy_check_upgrade_required throws expected exceptions."""
+ self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+
+ firmware.firmware_version = lambda: b"08.42.50.00"
+ firmware.nvsram_version = lambda: b"not_nvsram_version"
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve controller firmware information."):
+ with patch(self.REQUEST_FUNC, return_value=Exception()):
+ firmware.proxy_check_upgrade_required()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve storage system's NVSRAM version."):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, [{"versionString": "08.42.50.00"}]), Exception()]):
+ firmware.is_firmware_bundled = lambda: True
+ firmware.proxy_check_upgrade_required()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve storage system's NVSRAM version."):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.42.50.00"]), Exception()]):
+ firmware.is_firmware_bundled = lambda: False
+ firmware.proxy_check_upgrade_required()
+
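+ # A firmware file older than the installed version must be rejected as a downgrade (bundled and non-bundled).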
+ with self.assertRaisesRegexp(AnsibleFailJson, "Downgrades are not permitted."):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, [{"versionString": "08.42.50.00"}]), (200, ["nvsram_version"])]):
+ firmware.firmware_version = lambda: b"08.40.00.00"
+ firmware.nvsram_version = lambda: "nvsram_version"
+ firmware.is_firmware_bundled = lambda: True
+ firmware.proxy_check_upgrade_required()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Downgrades are not permitted."):
+ with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.42.50.00"]), (200, ["nvsram_version"])]):
+ firmware.is_firmware_bundled = lambda: False
+ firmware.proxy_check_upgrade_required()
+
+ def test_proxy_wait_for_upgrade_pass(self):
+ """Verify proxy_wait_for_upgrade."""
+ with patch(self.SLEEP_FUNC, return_value=None):
+ self._set_args({"firmware": "test_firmware.dlp", "nvsram": "expected_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+
+ with patch(self.REQUEST_FUNC, side_effect=[(200, {"running": True}),
+ (200, {"running": False, "activationCompletionTime": "completion_time"})]):
+ firmware.proxy_wait_for_upgrade()
+
+ def test_proxy_wait_for_upgrade_fail(self):
+ """Verify proxy_wait_for_upgrade throws expected exceptions."""
+ with patch(self.SLEEP_FUNC, return_value=None):
+ self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to complete upgrade."):
+ with patch(self.REQUEST_FUNC, return_value=(200, {"running": False, "activationCompletionTime": None})):
+ firmware.proxy_wait_for_upgrade()
+
+ def test_proxy_upgrade_fail(self):
+ """Verify proxy_upgrade throws expected exceptions."""
+ self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
+ firmware = NetAppESeriesFirmware()
+
+ firmware.is_firmware_bundled = lambda: True
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to initiate firmware upgrade."):
+ with patch(self.REQUEST_FUNC, return_value=Exception()):
+ firmware.proxy_upgrade()
+
+ firmware.is_firmware_bundled = lambda: False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to initiate firmware upgrade."):
+ with patch(self.REQUEST_FUNC, return_value=Exception()):
+ firmware.proxy_upgrade()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_global.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_global.py
new file mode 100644
index 000000000..44ba8f4ab
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_global.py
@@ -0,0 +1,494 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_global import NetAppESeriesGlobalSettings
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat.mock import patch, mock_open
+
+
+class GlobalSettingsTest(ModuleTestCase):
+ REQUIRED_PARAMS = {
+ 'api_username': 'rw',
+ 'api_password': 'password',
+ 'api_url': 'http://localhost',
+ 'ssid': '1',
+ }
+ REQ_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_global.NetAppESeriesGlobalSettings.request'
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_init_pass(self):
+ """Verify module instantiates successfully."""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 80, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 80, "default_host_type": "linux dm-mp", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 80, "default_host_type": "linux dm-mp", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+
+ def test_init_fail(self):
+ """Verify module fails when autoload is enabled but host connectivity reporting is not."""
+ self._set_args({"automatic_load_balancing": "enabled", "host_connectivity_reporting": "disabled"})
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Option automatic_load_balancing requires host_connectivity_reporting to be enabled."):
+ instance = NetAppESeriesGlobalSettings()
+
+ def test_get_current_configuration_pass(self):
+ """Ensure get_current_configuration method succeeds."""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 80, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
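+ # Responses in request order: array capabilities, host type options, cache settings, then the current array configuration.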
+ with patch(self.REQ_FUNC, side_effect=[(200, {"productCapabilities": [], "featureParameters": {"cacheBlockSizes": []}}), (200, []),
+ (200, [{"defaultHostTypeIndex": 28, "cache": {"cacheBlkSize": 32768, "demandFlushThreshold": 90}}]),
+ (200, {"autoLoadBalancingEnabled": True, "hostConnectivityReportingEnabled": True, "name": "array1"})]):
+ self.assertEqual(instance.get_current_configuration(), {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 90},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {}, "name": 'array1'})
+
+ def test_get_current_configuration_fail(self):
+ """Ensure exceptions are thrown when current configuration requests fail."""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 80, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve storage array capabilities."):
+ with patch(self.REQ_FUNC, side_effect=[Exception()]):
+ instance.get_current_configuration()
+
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 80, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve storage array host options."):
+ with patch(self.REQ_FUNC, side_effect=[(200, {"productCapabilities": [], "featureParameters": {"cacheBlockSizes": []}}), Exception()]):
+ instance.get_current_configuration()
+
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 80, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve cache settings."):
+ with patch(self.REQ_FUNC, side_effect=[(200, {"productCapabilities": [], "featureParameters": {"cacheBlockSizes": []}}), (200, []), Exception()]):
+ instance.get_current_configuration()
+
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 80, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to determine current configuration."):
+ with patch(self.REQ_FUNC, side_effect=[(200, {"productCapabilities": [], "featureParameters": {"cacheBlockSizes": []}}), (200, []),
+ (200, [{"defaultHostTypeIndex": 28, "cache": {"cacheBlkSize": 32768, "demandFlushThreshold": 90}}]),
+ Exception()]):
+ instance.get_current_configuration()
+
+ def test_cache_block_size_pass(self):
+ """Verify cache_block_size passes successfully."""
+ self._set_args({"cache_flush_threshold": 80, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 90},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {}, "name": 'array1'}
+ self.assertFalse(instance.change_cache_block_size_required())
+
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 80, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 90},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {}, "name": 'array1'}
+ self.assertFalse(instance.change_cache_block_size_required())
+
+ self._set_args({"cache_block_size": 16384, "cache_flush_threshold": 80, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 90},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {}, "name": 'array1'}
+ self.assertTrue(instance.change_cache_block_size_required())
+
+ def test_cache_block_size_fail(self):
+ """Verify cache_block_size throws expected exceptions."""
+ self._set_args({"cache_block_size": 16384, "cache_flush_threshold": 80, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 90},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {}, "name": 'array1'}
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Invalid cache block size."):
+ self.assertTrue(instance.change_cache_block_size_required())
+
+ def test_change_cache_flush_threshold_required_pass(self):
+ """Verify change_cache_block_size_required passes successfully."""
+ self._set_args({"cache_block_size": 32768, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {}, "name": 'array1'}
+ self.assertFalse(instance.change_cache_flush_threshold_required())
+
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 80, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {}, "name": 'array1'}
+ self.assertFalse(instance.change_cache_flush_threshold_required())
+
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {}, "name": 'array1'}
+ self.assertTrue(instance.change_cache_flush_threshold_required())
+
+ def test_change_cache_flush_threshold_required_fail(self):
+ """Verify change_cache_block_size_required throws expected exceptions."""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 100, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {}, "name": 'array1'}
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Invalid cache flushing threshold, it must be equal to or between 0 and 100."):
+ instance.change_cache_flush_threshold_required()
+
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 0, "default_host_type": "linux dm-mp", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {}, "name": 'array1'}
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Invalid cache flushing threshold, it must be equal to or between 0 and 100."):
+ instance.change_cache_flush_threshold_required()
+
+ def test_change_host_type_required_pass(self):
+ """Verify change_host_type_required passes successfully."""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ self.assertFalse(instance.change_host_type_required())
+
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Linux DM-MP", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ self.assertFalse(instance.change_host_type_required())
+
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ self.assertTrue(instance.change_host_type_required())
+
+ def test_change_host_type_required_fail(self):
+ """Verify change_host_type_required throws expected exceptions"""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "NotAHostType", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Invalid host type index!"):
+ self.assertTrue(instance.change_host_type_required())
+
+ def test_change_autoload_enabled_required_pass(self):
+ """Verify change_autoload_enabled_required passes successfully."""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ self.assertFalse(instance.change_autoload_enabled_required())
+
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ self.assertFalse(instance.change_autoload_enabled_required())
+
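+        # Requested state differs from the current state; change required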
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ self.assertTrue(instance.change_autoload_enabled_required())
+
+ def test_change_autoload_enabled_required_fail(self):
+ """Verify change_autoload_enabled_required throws expected exceptions"""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "NotAHostType", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Automatic load balancing is not available."):
+ self.assertTrue(instance.change_autoload_enabled_required())
+
+ def test_change_host_connectivity_reporting_enabled_required_pass(self):
+ """Verify change_host_connectivity_reporting_enabled_required passes successfully."""
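+        # host_connectivity_reporting not specified; no change required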
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ self.assertFalse(instance.change_host_connectivity_reporting_enabled_required())
+
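+        # Requested state matches the current state; no change required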
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "enabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ self.assertFalse(instance.change_host_connectivity_reporting_enabled_required())
+
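+        # Requested state differs from the current state; change required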
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ self.assertTrue(instance.change_host_connectivity_reporting_enabled_required())
+
+ def test_change_name_required_pass(self):
+ """Verify change_name_required passes successfully."""
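+        # No name specified; no change required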
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ self.assertFalse(instance.change_name_required())
+
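+        # Name matches the current array name; no change required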
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array1"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ self.assertFalse(instance.change_name_required())
+
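+        # Name differs from the current array name; change required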
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array2"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ self.assertTrue(instance.change_name_required())
+
+ def test_change_name_required_fail(self):
+ """Verify change_name_required throws expected exceptions"""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "NotAHostType", "automatic_load_balancing": "enabled",
+ "host_connectivity_reporting": "enabled", "name": "A" * 31})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": False, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ with self.assertRaisesRegexp(AnsibleFailJson, r"The provided name is invalid, it must be less than or equal to 30 characters in length."):
+ self.assertTrue(instance.change_name_required())
+
+ def test_update_cache_settings_pass(self):
+ """Verify update_cache_settings passes successfully."""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array2"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ with patch(self.REQ_FUNC, return_value=(200, None)):
+ instance.update_cache_settings()
+
+ def test_update_cache_settings_fail(self):
+ """Verify update_cache_settings throws expected exceptions"""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array2"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to set cache settings."):
+ with patch(self.REQ_FUNC, return_value=Exception()):
+ instance.update_cache_settings()
+
+ def test_update_host_type_pass(self):
+ """Verify update_host_type passes successfully."""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array2"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ with patch(self.REQ_FUNC, return_value=(200, None)):
+ instance.update_host_type()
+
+ def test_update_host_type_fail(self):
+ """Verify update_host_type throws expected exceptions"""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array2"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to set default host type."):
+ with patch(self.REQ_FUNC, return_value=Exception()):
+ instance.update_host_type()
+
+ def test_update_autoload_pass(self):
+ """Verify update_autoload passes successfully."""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array2"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ with patch(self.REQ_FUNC, return_value=(200, None)):
+ instance.update_autoload()
+
+ def test_update_autoload_fail(self):
+ """Verify update_autoload throws expected exceptions"""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array2"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to set automatic load balancing state."):
+ with patch(self.REQ_FUNC, return_value=Exception()):
+ instance.update_autoload()
+
+ def test_update_host_connectivity_reporting_enabled_pass(self):
+ """Verify update_host_connectivity_reporting_enabled passes successfully."""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array2"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ with patch(self.REQ_FUNC, return_value=(200, None)):
+ instance.update_host_connectivity_reporting_enabled()
+
+ def test_update_host_connectivity_reporting_enabled_fail(self):
+ """Verify update_host_connectivity_reporting_enabled throws expected exceptions"""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array2"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to enable host connectivity reporting."):
+ with patch(self.REQ_FUNC, return_value=Exception()):
+ instance.update_host_connectivity_reporting_enabled()
+
+ def test_update_name_pass(self):
+ """Verify update_name passes successfully."""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array2"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ with patch(self.REQ_FUNC, return_value=(200, None)):
+ instance.update_name()
+
+ def test_update_name_fail(self):
+ """Verify update_name throws expected exceptions"""
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array2"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.get_current_configuration = lambda: {"autoload_capable": True, "autoload_enabled": True, "cache_block_size_options": [16384, 32768],
+ "cache_settings": {"cache_block_size": 32768, "cache_flush_threshold": 80},
+ "default_host_type_index": 28, "host_connectivity_reporting_enabled": True,
+ "host_type_options": {"windows": 1, "linux": 28}, "name": 'array1'}
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to set the storage array name!"):
+ with patch(self.REQ_FUNC, return_value=Exception()):
+ instance.update_name()
+
+ def test_update_pass(self):
+ """Verify update passes successfully."""
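+        # No changes required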
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array2"})
+ instance = NetAppESeriesGlobalSettings()
+
+ instance.change_autoload_enabled_required = lambda: False
+ instance.change_cache_block_size_required = lambda: False
+ instance.change_cache_flush_threshold_required = lambda: False
+ instance.change_host_type_required = lambda: False
+ instance.change_name_required = lambda: False
+ instance.change_host_connectivity_reporting_enabled_required = lambda: False
+ with self.assertRaisesRegexp(AnsibleExitJson, r"'changed': False"):
+ with patch(self.REQ_FUNC, side_effect=[(200, {"productCapabilities": [], "featureParameters": {"cacheBlockSizes": []}}), (200, []),
+ (200, [{"defaultHostTypeIndex": 28, "cache": {"cacheBlkSize": 32768, "demandFlushThreshold": 90}}]),
+ (200, {"autoLoadBalancingEnabled": True, "hostConnectivityReportingEnabled": True, "name": "array1"})] * 2):
+ instance.update()
+
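+        # Automatic load balancing change required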
+ self._set_args({"cache_block_size": 32768, "cache_flush_threshold": 90, "default_host_type": "Windows", "automatic_load_balancing": "disabled",
+ "host_connectivity_reporting": "disabled", "name": "array2"})
+ instance = NetAppESeriesGlobalSettings()
+ instance.change_autoload_enabled_required = lambda: True
+ instance.change_cache_block_size_required = lambda: False
+ instance.change_cache_flush_threshold_required = lambda: False
+ instance.change_host_type_required = lambda: False
+ instance.change_name_required = lambda: False
+ instance.change_host_connectivity_reporting_enabled_required = lambda: False
+ instance.update_autoload = lambda: None
+ with self.assertRaisesRegexp(AnsibleExitJson, r"'changed': True"):
+ with patch(self.REQ_FUNC, side_effect=[(200, {"productCapabilities": [], "featureParameters": {"cacheBlockSizes": []}}), (200, []),
+ (200, [{"defaultHostTypeIndex": 28, "cache": {"cacheBlkSize": 32768, "demandFlushThreshold": 90}}]),
+ (200, {"autoLoadBalancingEnabled": True, "hostConnectivityReportingEnabled": True, "name": "array1"})] * 2):
+ instance.update()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_host.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_host.py
new file mode 100644
index 000000000..646010ffc
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_host.py
@@ -0,0 +1,434 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_host import NetAppESeriesHost
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class HostTest(ModuleTestCase):
+ REQUIRED_PARAMS = {
+ 'api_username': 'rw',
+ 'api_password': 'password',
+ 'api_url': 'http://localhost',
+ 'ssid': '1',
+ 'name': '1',
+ }
+ HOST = {
+ 'name': '1',
+ 'hostRef': '123',
+ 'label': '1',
+ 'id': '0' * 30,
+ 'clusterRef': 40 * '0',
+ 'hostTypeIndex': 28,
+ 'hostSidePorts': [],
+ 'initiators': [],
+ 'ports': [],
+ }
+ HOST_ALT = {
+ 'name': '2',
+ 'label': '2',
+ 'id': '1' * 30,
+ 'clusterRef': '1',
+ 'hostSidePorts': [],
+ 'initiators': [],
+ 'ports': [],
+ }
+ EXISTING_HOSTS = [
+ {"hostRef": "84000000600A098000A4B28D00303D065D430118", "clusterRef": "0000000000000000000000000000000000000000", "label": "Beegfs_storage1",
+ "hostTypeIndex": 28, "ports": [], "initiators": [{"initiatorRef": "89000000600A098000A4B28D00303CF55D4300E3",
+ "nodeName": {"ioInterfaceType": "iscsi",
+ "iscsiNodeName": "iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818",
+ "remoteNodeWWN": None, "nvmeNodeName": None},
+ "alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "beegfs_storage1_iscsi_0",
+ "hostRef": "84000000600A098000A4B28D00303D065D430118",
+ "id": "89000000600A098000A4B28D00303CF55D4300E3"}],
+ "hostSidePorts": [{"type": "iscsi", "address": "iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818", "label": "beegfs_storage1_iscsi_0"}],
+ "id": "84000000600A098000A4B28D00303D065D430118", "name": "beegfs_storage1"},
+ {"hostRef": "84000000600A098000A4B9D10030370B5D430109", "clusterRef": "0000000000000000000000000000000000000000", "label": "beegfs_metadata1",
+ "hostTypeIndex": 28, "ports": [], "initiators": [{"initiatorRef": "89000000600A098000A4B28D00303CFC5D4300F7",
+ "nodeName": {"ioInterfaceType": "iscsi",
+ "iscsiNodeName": "iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8",
+ "remoteNodeWWN": None, "nvmeNodeName": None},
+ "alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "beegfs_metadata1_iscsi_0",
+ "hostRef": "84000000600A098000A4B9D10030370B5D430109",
+ "id": "89000000600A098000A4B28D00303CFC5D4300F7"}],
+ "hostSidePorts": [{"type": "iscsi", "address": "iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8", "label": "beegfs_metadata1_iscsi_0"}],
+ "id": "84000000600A098000A4B9D10030370B5D430109", "name": "beegfs_metadata1"},
+ {"hostRef": "84000000600A098000A4B9D10030370B5D430109", "clusterRef": "85000000600A098000A4B9D1003637135D483DEB", "label": "beegfs_metadata2",
+ "hostTypeIndex": 28, "ports": [], "initiators": [{"initiatorRef": "89000000600A098000A4B28D00303CFC5D4300F7",
+ "nodeName": {"ioInterfaceType": "iscsi",
+ "iscsiNodeName": "iqn.used_elsewhere",
+ "remoteNodeWWN": None, "nvmeNodeName": None},
+ "alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "beegfs_metadata2_iscsi_0",
+ "hostRef": "84000000600A098000A4B9D10030370B5D430109",
+ "id": "89000000600A098000A4B28D00303CFC5D4300F7"}],
+ "hostSidePorts": [{"type": "iscsi", "address": "iqn.used_elsewhere", "label": "beegfs_metadata2_iscsi_0"}],
+ "id": "84000000600A098000A4B9D10030370B5D430120", "name": "beegfs_metadata2"}]
+ HOST_GROUPS = [{"clusterRef": "85000000600A098000A4B9D1003637135D483DEB", "label": "test_group", "isSAControlled": False,
+ "confirmLUNMappingCreation": False, "protectionInformationCapableAccessMethod": True, "isLun0Restricted": False,
+ "id": "85000000600A098000A4B9D1003637135D483DEB", "name": "test_group"}]
+ HOST_TYPES = [{"name": "FactoryDefault", "index": 0, "code": "FactoryDefault"},
+ {"name": "Windows 2000/Server 2003/Server 2008 Non-Clustered", "index": 1, "code": "W2KNETNCL"},
+ {"name": "Solaris", "index": 2, "code": "SOL"},
+ {"name": "Linux", "index": 6, "code": "LNX"},
+ {"name": "LnxALUA", "index": 7, "code": "LnxALUA"},
+ {"name": "Windows 2000/Server 2003/Server 2008 Clustered", "index": 8, "code": "W2KNETCL"},
+ {"name": "LnxTPGSALUA_SF", "index": 27, "code": "LnxTPGSALUA_SF"},
+ {"name": "LnxDHALUA", "index": 28, "code": "LnxDHALUA"}]
+ REQ_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_host.NetAppESeriesHost.request'
+
+ def _set_args(self, args):
+ module_args = self.REQUIRED_PARAMS.copy()
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_host_exists_pass(self):
+ """Verify host_exists produces expected results."""
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'new_host', 'host_type': 'linux dm-mp', 'force_port': False,
+ 'ports': [{'label': 'new_host_port_1', 'type': 'fc', 'port': '0x08ef08ef08ef08ef'}]})
+ host = NetAppESeriesHost()
+ self.assertFalse(host.host_exists)
+
+ self._set_args({'state': 'present', 'name': 'does_not_exist', 'host_type': 'linux dm-mp',
+ 'ports': [{'label': 'beegfs_storage1_iscsi_0', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
+ host = NetAppESeriesHost()
+ self.assertFalse(host.host_exists)
+
+ self._set_args({'state': 'present', 'name': 'beegfs_storage1', 'host_type': 'linux dm-mp',
+ 'ports': [{'label': 'beegfs_storage1_iscsi_0', 'type': 'iscsi', 'port': 'iqn.differentiqn.org'}]})
+ host = NetAppESeriesHost()
+ self.assertTrue(host.host_exists)
+
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_0', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
+ host = NetAppESeriesHost()
+ self.assertTrue(host.host_exists)
+
+ def test_host_exists_fail(self):
+ """Verify host_exists produces expected exceptions."""
+ self._set_args({'state': 'present', 'host_type': 'linux dm-mp', 'ports': [{'label': 'abc', 'type': 'iscsi', 'port': 'iqn:0'}]})
+ host = NetAppESeriesHost()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to determine host existence."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ exists = host.host_exists
+
+ def test_needs_update_pass(self):
+ """Verify needs_update produces expected results."""
+ # No changes
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp',
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_0', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ self.assertFalse(host.needs_update)
+
+ # Change host type
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': False,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi', 'port': 'iqn.not_used'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+
+ # Add port to host
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi', 'port': 'iqn.not_used'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+
+ # Change port name
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_2', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+
+ # take port from another host by force
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
+ 'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+
+ def test_needs_update_fail(self):
+ """Verify needs_update produces expected exceptions."""
+ with self.assertRaisesRegexp(AnsibleFailJson, "is associated with a different host."):
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
+ 'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ host.needs_update
+
+ def test_valid_host_type_pass(self):
+ """Validate the available host types."""
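+        # Host type may be specified by index or by name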
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.HOST_TYPES)):
+ self._set_args({'state': 'present', 'host_type': '0'})
+ host = NetAppESeriesHost()
+ self.assertTrue(host.valid_host_type)
+ self._set_args({'state': 'present', 'host_type': '28'})
+ host = NetAppESeriesHost()
+ self.assertTrue(host.valid_host_type)
+ self._set_args({'state': 'present', 'host_type': 'windows'})
+ host = NetAppESeriesHost()
+ self.assertTrue(host.valid_host_type)
+ self._set_args({'state': 'present', 'host_type': 'linux dm-mp'})
+ host = NetAppESeriesHost()
+ self.assertTrue(host.valid_host_type)
+
+ def test_valid_host_type_fail(self):
+        """Verify invalid host types produce expected exceptions."""
+ with self.assertRaisesRegexp(AnsibleFailJson, "host_type must be either a host type name or host type index found integer the documentation"):
+ self._set_args({'state': 'present', 'host_type': 'non-host-type'})
+ host = NetAppESeriesHost()
+
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.HOST_TYPES)):
+ with self.assertRaisesRegexp(AnsibleFailJson, "There is no host type with index"):
+ self._set_args({'state': 'present', 'host_type': '4'})
+ host = NetAppESeriesHost()
+ valid = host.valid_host_type
+
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to get host types."):
+ self._set_args({'state': 'present', 'host_type': '4'})
+ host = NetAppESeriesHost()
+ valid = host.valid_host_type
+
+ def test_assigned_host_ports_pass(self):
+ """Verify assigned_host_ports gives expected results."""
+
+ # Add an unused port to host
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi', 'port': 'iqn.not_used'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+ self.assertEquals(host.assigned_host_ports(), {})
+
+ # Change port name (force)
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_2', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+ self.assertEquals(host.assigned_host_ports(), {'84000000600A098000A4B9D10030370B5D430109': ['89000000600A098000A4B28D00303CFC5D4300F7']})
+
+ # Change port type
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'fc', 'port': '08:ef:7e:24:52:a0'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+ self.assertEquals(host.assigned_host_ports(), {})
+
+        # Take port from another host by force
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
+ 'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi', 'port': 'iqn.used_elsewhere'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+ self.assertEquals(host.assigned_host_ports(), {'84000000600A098000A4B9D10030370B5D430109': ['89000000600A098000A4B28D00303CFC5D4300F7']})
+
+        # Take port from another host by force and unassign it from the original host
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.EXISTING_HOSTS), (200, {})]):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
+ 'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi', 'port': 'iqn.used_elsewhere'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+ self.assertEquals(host.assigned_host_ports(apply_unassigning=True),
+ {'84000000600A098000A4B9D10030370B5D430109': ['89000000600A098000A4B28D00303CFC5D4300F7']})
+
+ def test_assigned_host_ports_fail(self):
+ """Verify assigned_host_ports gives expected exceptions."""
+        # Attempt to reassign an already-assigned port without force_port
+ with self.assertRaisesRegexp(AnsibleFailJson, "There are no host ports available OR there are not enough unassigned host ports"):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.EXISTING_HOSTS)]):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_2', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+ host.assigned_host_ports(apply_unassigning=True)
+
+        # Take port from another host and fail because force_port == False
+ with self.assertRaisesRegexp(AnsibleFailJson, "There are no host ports available OR there are not enough unassigned host ports"):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.EXISTING_HOSTS)]):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
+ 'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi', 'port': 'iqn.used_elsewhere'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+ host.assigned_host_ports(apply_unassigning=True)
+
+        # Request a port assigned to another host for a new host and fail because force_port == False
+ with self.assertRaisesRegexp(AnsibleFailJson, "There are no host ports available OR there are not enough unassigned host ports"):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.EXISTING_HOSTS)]):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata3', 'host_type': 'linux dm-mp', 'force_port': False,
+ 'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi', 'port': 'iqn.used_elsewhere'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ host.assigned_host_ports(apply_unassigning=True)
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to unassign host port."):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.EXISTING_HOSTS), Exception()]):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
+ 'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi', 'port': 'iqn.used_elsewhere'}]})
+ host = NetAppESeriesHost()
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+ host.assigned_host_ports(apply_unassigning=True)
+
+ def test_update_host_pass(self):
+ """Verify update_host produces expected results."""
+ # Change host type
+ with self.assertRaises(AnsibleExitJson):
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': True,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
+ host = NetAppESeriesHost()
+ host.build_success_payload = lambda x: {}
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+ host.update_host()
+
+ # Change port iqn
+ with self.assertRaises(AnsibleExitJson):
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi', 'port': 'iqn.not_used'}]})
+ host = NetAppESeriesHost()
+ host.build_success_payload = lambda x: {}
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+ host.update_host()
+
+ # Change port type to fc
+ with self.assertRaises(AnsibleExitJson):
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'fc', 'port': '0x08ef08ef08ef08ef'}]})
+ host = NetAppESeriesHost()
+ host.build_success_payload = lambda x: {}
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+ host.update_host()
+
+ # Change port name
+ with self.assertRaises(AnsibleExitJson):
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': True,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_12', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
+ host = NetAppESeriesHost()
+ host.build_success_payload = lambda x: {}
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+ host.update_host()
+
+ def test_update_host_fail(self):
+ """Verify update_host produces expected exceptions."""
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to update host."):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.EXISTING_HOSTS), Exception()]):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': False,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_0', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
+ host = NetAppESeriesHost()
+ host.build_success_payload = lambda x: {}
+ exists = host.host_exists
+ self.assertTrue(host.needs_update)
+ host.update_host()
+
+ def test_create_host_pass(self):
+ """Verify create_host produces expected results."""
+ def _assigned_host_ports(apply_unassigning=False):
+ return None
+
+ with self.assertRaises(AnsibleExitJson):
+ with mock.patch(self.REQ_FUNC, return_value=(200, {'id': '84000000600A098000A4B9D10030370B5D430109'})):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': True,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
+ host = NetAppESeriesHost()
+ with mock.patch(self.REQ_FUNC, return_value=(200, [])):
+ host.assigned_host_ports = _assigned_host_ports
+ host.build_success_payload = lambda x: {}
+ host.create_host()
+
+ def test_create_host_fail(self):
+ """Verify create_host produces expected exceptions."""
+ def _assigned_host_ports(apply_unassigning=False):
+ return None
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to create host."):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, []), Exception()]):
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': True,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
+ host = NetAppESeriesHost()
+ host.assigned_host_ports = _assigned_host_ports
+ host.build_success_payload = lambda x: {}
+ host.create_host()
+
+ with self.assertRaisesRegexp(AnsibleExitJson, "Host already exists."):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.EXISTING_HOSTS)]):
+ self._set_args({'state': 'present', 'name': 'beegfs_storage1', 'host_type': 'linux dm-mp', 'force_port': True,
+ 'ports': [{'label': 'beegfs_storage1_iscsi_0', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
+ host = NetAppESeriesHost()
+ host.assigned_host_ports = _assigned_host_ports
+ host.build_success_payload = lambda x: {}
+ host.create_host()
+
+ def test_remove_host_pass(self):
+ """Verify remove_host produces expected results."""
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ self._set_args({'state': 'absent', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_0', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
+ host = NetAppESeriesHost()
+ host.host_obj = {"id": "84000000600A098000A4B9D10030370B5D430109"}
+ host.remove_host()
+
+ def test_remove_host_fail(self):
+ """Verify remove_host produces expected exceptions."""
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to remove host."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ self._set_args({'state': 'absent', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_0', 'type': 'iscsi',
+ 'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
+ host = NetAppESeriesHost()
+ host.host_obj = {"id": "84000000600A098000A4B9D10030370B5D430109"}
+ host.remove_host()
+
+ def test_build_success_payload(self):
+ """Validate success payload."""
+ def _assigned_host_ports(apply_unassigning=False):
+ return None
+
+ self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': True,
+ 'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi', 'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
+ host = NetAppESeriesHost()
+ self.assertEquals(host.build_success_payload(), {'api_url': 'http://localhost/', 'ssid': '1'})
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_hostgroup.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_hostgroup.py
new file mode 100644
index 000000000..6cecf0e8c
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_hostgroup.py
@@ -0,0 +1,140 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_hostgroup import NetAppESeriesHostGroup
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class HostGroupTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"api_username": "rw",
+ "api_password": "password",
+ "api_url": "http://localhost",
+ "ssid": "1"}
+ REQ_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_hostgroup.NetAppESeriesHostGroup.request"
+ HOSTS_GET_RESPONSE = [
+ {"hostRef": "84000000600A098000A4B28D0030102E5C3DFC0F",
+ "clusterRef": "85000000600A098000A4B28D0036102C5C3DFC08", "id": "84000000600A098000A4B28D0030102E5C3DFC0F",
+ "name": "host1"},
+ {"hostRef": "84000000600A098000A4B28D003010315C3DFC11",
+ "clusterRef": "85000000600A098000A4B9D100360F765C3DFC1C", "id": "84000000600A098000A4B28D003010315C3DFC11",
+ "name": "host2"},
+ {"hostRef": "84000000600A098000A4B28D003010345C3DFC14",
+ "clusterRef": "85000000600A098000A4B9D100360F765C3DFC1C", "id": "84000000600A098000A4B28D003010345C3DFC14",
+ "name": "host3"}]
+ HOSTGROUPS_GET_RESPONSE = [
+ {"clusterRef": "85000000600A098000A4B28D0036102C5C3DFC08", "id": "85000000600A098000A4B28D0036102C5C3DFC08",
+ "name": "group1"},
+ {"clusterRef": "85000000600A098000A4B9D100360F765C3DFC1C", "id": "85000000600A098000A4B9D100360F765C3DFC1C",
+ "name": "group2"},
+ {"clusterRef": "85000000600A098000A4B9D100360F775C3DFC1E", "id": "85000000600A098000A4B9D100360F775C3DFC1E",
+ "name": "group3"}]
+
+ def _set_args(self, args):
+ self.module_args = self.REQUIRED_PARAMS.copy()
+ self.module_args.update(args)
+ set_module_args(self.module_args)
+
+ def test_hosts_fail(self):
+        """Ensure that the hosts property method fails when self.request throws an exception."""
+ self._set_args({"state": "present", "name": "hostgroup1", "hosts": ["host1", "host2"]})
+ hostgroup_object = NetAppESeriesHostGroup()
+ with self.assertRaises(AnsibleFailJson):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ hosts = hostgroup_object.hosts
+
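+        # Requested hosts are not defined on the array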
+ self._set_args({"state": "present", "name": "hostgroup1", "hosts": ["host1", "host2"]})
+ hostgroup_object = NetAppESeriesHostGroup()
+ with mock.patch(self.REQ_FUNC, return_value=(200, [])):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Expected host does not exist"):
+ hosts = hostgroup_object.hosts
+
+ def test_hosts_pass(self):
+ """Evaluate hosts property method for valid returned data structure."""
+ expected_host_list = ['84000000600A098000A4B28D003010315C3DFC11', '84000000600A098000A4B28D0030102E5C3DFC0F']
+ for hostgroup_hosts in [["host1", "host2"], ["84000000600A098000A4B28D0030102E5C3DFC0F",
+ "84000000600A098000A4B28D003010315C3DFC11"]]:
+ self._set_args({"state": "present", "name": "hostgroup1", "hosts": hostgroup_hosts})
+ hostgroup_object = NetAppESeriesHostGroup()
+
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.HOSTS_GET_RESPONSE)):
+ for item in hostgroup_object.hosts:
+ self.assertTrue(item in expected_host_list)
+
+ # Create hostgroup with no hosts
+ self._set_args({"state": "present", "name": "hostgroup1"})
+ hostgroup_object = NetAppESeriesHostGroup()
+ with mock.patch(self.REQ_FUNC, return_value=(200, [])):
+ self.assertEqual(hostgroup_object.hosts, [])
+
+ def test_host_groups_fail(self):
+ """Ensure that the host_groups property method fails when self.request throws an exception."""
+ self._set_args({"state": "present", "name": "hostgroup1", "hosts": ["host1", "host2"]})
+ hostgroup_object = NetAppESeriesHostGroup()
+ with self.assertRaises(AnsibleFailJson):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ host_groups = hostgroup_object.host_groups
+
+ def test_host_groups_pass(self):
+ """Evaluate host_groups property method for valid return data structure."""
+ expected_groups = [
+ {'hosts': ['84000000600A098000A4B28D0030102E5C3DFC0F'], 'id': '85000000600A098000A4B28D0036102C5C3DFC08',
+ 'name': 'group1'},
+ {'hosts': ['84000000600A098000A4B28D003010315C3DFC11', '84000000600A098000A4B28D003010345C3DFC14'],
+ 'id': '85000000600A098000A4B9D100360F765C3DFC1C', 'name': 'group2'},
+ {'hosts': [], 'id': '85000000600A098000A4B9D100360F775C3DFC1E', 'name': 'group3'}]
+
+ self._set_args({"state": "present", "name": "hostgroup1", "hosts": ["host1", "host2"]})
+ hostgroup_object = NetAppESeriesHostGroup()
+
+ with mock.patch(self.REQ_FUNC,
+ side_effect=[(200, self.HOSTGROUPS_GET_RESPONSE), (200, self.HOSTS_GET_RESPONSE)]):
+ self.assertEqual(hostgroup_object.host_groups, expected_groups)
+
+ @mock.patch.object(NetAppESeriesHostGroup, "host_groups")
+ @mock.patch.object(NetAppESeriesHostGroup, "hosts")
+ @mock.patch.object(NetAppESeriesHostGroup, "create_host_group")
+ @mock.patch.object(NetAppESeriesHostGroup, "update_host_group")
+ @mock.patch.object(NetAppESeriesHostGroup, "delete_host_group")
+ def test_apply_pass(self, fake_delete_host_group, fake_update_host_group, fake_create_host_group, fake_hosts,
+ fake_host_groups):
+ """Apply desired host group state to the storage array."""
+ hosts_response = ['84000000600A098000A4B28D003010315C3DFC11', '84000000600A098000A4B28D0030102E5C3DFC0F']
+ host_groups_response = [
+ {'hosts': ['84000000600A098000A4B28D0030102E5C3DFC0F'], 'id': '85000000600A098000A4B28D0036102C5C3DFC08',
+ 'name': 'group1'},
+ {'hosts': ['84000000600A098000A4B28D003010315C3DFC11', '84000000600A098000A4B28D003010345C3DFC14'],
+ 'id': '85000000600A098000A4B9D100360F765C3DFC1C', 'name': 'group2'},
+ {'hosts': [], 'id': '85000000600A098000A4B9D100360F775C3DFC1E', 'name': 'group3'}]
+
+ fake_host_groups.return_value = host_groups_response
+ fake_hosts.return_value = hosts_response
+ fake_create_host_group.return_value = lambda x: "Host group created!"
+ fake_update_host_group.return_value = lambda x: "Host group updated!"
+ fake_delete_host_group.return_value = lambda x: "Host group deleted!"
+
+ # Test create new host group
+ self._set_args({"state": "present", "name": "hostgroup1", "hosts": ["host1", "host2"]})
+ hostgroup_object = NetAppESeriesHostGroup()
+ with self.assertRaises(AnsibleExitJson):
+ hostgroup_object.apply()
+
+ # Test make no changes to existing host group
+ self._set_args({"state": "present", "name": "group1", "hosts": ["host1"]})
+ hostgroup_object = NetAppESeriesHostGroup()
+ with self.assertRaises(AnsibleExitJson):
+ hostgroup_object.apply()
+
+ # Test add host to existing host group
+ self._set_args({"state": "present", "name": "group1", "hosts": ["host1", "host2"]})
+ hostgroup_object = NetAppESeriesHostGroup()
+ with self.assertRaises(AnsibleExitJson):
+ hostgroup_object.apply()
+
+ # Test delete existing host group
+ self._set_args({"state": "absent", "name": "group1"})
+ hostgroup_object = NetAppESeriesHostGroup()
+ with self.assertRaises(AnsibleExitJson):
+ hostgroup_object.apply()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_ib_iser_interface.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_ib_iser_interface.py
new file mode 100644
index 000000000..d2eca39f2
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_ib_iser_interface.py
@@ -0,0 +1,159 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_ib_iser_interface import NetAppESeriesIbIserInterface
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class IbIserInterfaceTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"api_username": "rw",
+ "api_password": "password",
+ "api_url": "http://localhost",
+ "ssid": "1",
+ "controller": "A",
+ "channel": 1}
+
+ REQ_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_ib_iser_interface.NetAppESeriesIbIserInterface.request"
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_invalid_options_fail(self):
+ """Verify invalid options fail."""
+ options_list = [{"address": "nonaddress@somewhere.com"},
+ {"address": "192.168.100.1000"},
+ {"address": "1192.168.100.100"}]
+
+ for options in options_list:
+ self._set_args(options)
+ with self.assertRaisesRegexp(AnsibleFailJson, "An invalid ip address was provided for address."):
+ iface = NetAppESeriesIbIserInterface()
+
+ def test_get_interfaces_pass(self):
+ """Verify get_interfaces method passes."""
+ self._set_args({"address": "192.168.100.100"})
+ iface = NetAppESeriesIbIserInterface()
+ with mock.patch(self.REQ_FUNC, return_value=(200, [{"interfaceType": "iscsi", "iscsi": {"interfaceData": {"type": "infiniband",
+ "infinibandData": {"isIser": True}}}},
+ {"interfaceType": "iscsi", "iscsi": {"interfaceData": {"type": "infiniband",
+ "infinibandData": {"isIser": True}}}},
+ {"interfaceType": "fc", "fc": {}}])):
+ self.assertEquals(iface.get_interfaces(),
+ [{'interfaceType': 'iscsi', 'iscsi': {'interfaceData': {'type': 'infiniband', 'infinibandData': {'isIser': True}}}},
+ {'interfaceType': 'iscsi', 'iscsi': {'interfaceData': {'type': 'infiniband', 'infinibandData': {'isIser': True}}}}])
+
+ def test_get_interfaces_fails(self):
+ """Verify get_interfaces method throws expected exceptions."""
+ self._set_args({"address": "192.168.100.100"})
+ iface = NetAppESeriesIbIserInterface()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve defined host interfaces."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ iface.get_interfaces()
+
+ self._set_args({"address": "192.168.100.100"})
+ iface = NetAppESeriesIbIserInterface()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to detect any InfiniBand iSER interfaces!"):
+ with mock.patch(self.REQ_FUNC, return_value=(200, [{"interfaceType": "eth", "eth": {"interfaceData": {"type": "ethernet",
+ "infinibandData": {"isIser": False}}}},
+ {"interfaceType": "iscsi", "iscsi": {"interfaceData": {"type": "infiniband",
+ "infinibandData": {"isIser": False}}}},
+ {"interfaceType": "fc", "fc": {}}])):
+ iface.get_interfaces()
+
+ def test_get_ib_link_status_pass(self):
+ """Verify expected data structure."""
+ self._set_args({"address": "192.168.100.100"})
+ iface = NetAppESeriesIbIserInterface()
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"ibPorts": [{"channelPortRef": 1, "linkState": "active"},
+ {"channelPortRef": 2, "linkState": "down"},
+ {"channelPortRef": 3, "linkState": "down"},
+ {"channelPortRef": 4, "linkState": "active"}]})):
+ self.assertEquals(iface.get_ib_link_status(), {1: 'active', 2: 'down', 3: 'down', 4: 'active'})
+
+ def test_get_ib_link_status_fail(self):
+ """Verify expected exception is thrown."""
+ self._set_args({"address": "192.168.100.100"})
+ iface = NetAppESeriesIbIserInterface()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve ib link status information!"):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ statuses = iface.get_ib_link_status()
+
+ def test_is_change_required_pass(self):
+ """Verify is_change_required method returns expected values."""
+ self._set_args({"address": "192.168.100.100"})
+ iface = NetAppESeriesIbIserInterface()
+ iface.get_target_interface = lambda: {"iscsi": {"ipv4Data": {"ipv4AddressData": {"ipv4Address": "192.168.1.1"}}}}
+ self.assertTrue(iface.is_change_required())
+
+ self._set_args({"address": "192.168.100.100"})
+ iface = NetAppESeriesIbIserInterface()
+ iface.get_target_interface = lambda: {"iscsi": {"ipv4Data": {"ipv4AddressData": {"ipv4Address": "192.168.100.100"}}}}
+ self.assertFalse(iface.is_change_required())
+
+ def test_make_request_body_pass(self):
+ """Verify expected request body."""
+ self._set_args({"address": "192.168.100.100"})
+ iface = NetAppESeriesIbIserInterface()
+ iface.get_target_interface = lambda: {"iscsi": {"id": "1234", "ipv4Data": {"ipv4AddressData": {"ipv4Address": "192.168.1.1"}}}}
+ self.assertEquals(iface.make_request_body(), {"iscsiInterface": "1234",
+ "settings": {"tcpListenPort": [],
+ "ipv4Address": ["192.168.100.100"],
+ "ipv4SubnetMask": [],
+ "ipv4GatewayAddress": [],
+ "ipv4AddressConfigMethod": [],
+ "maximumFramePayloadSize": [],
+ "ipv4VlanId": [],
+ "ipv4OutboundPacketPriority": [],
+ "ipv4Enabled": [],
+ "ipv6Enabled": [],
+ "ipv6LocalAddresses": [],
+ "ipv6RoutableAddresses": [],
+ "ipv6PortRouterAddress": [],
+ "ipv6AddressConfigMethod": [],
+ "ipv6OutboundPacketPriority": [],
+ "ipv6VlanId": [],
+ "ipv6HopLimit": [],
+ "ipv6NdReachableTime": [],
+ "ipv6NdRetransmitTime": [],
+ "ipv6NdStaleTimeout": [],
+ "ipv6DuplicateAddressDetectionAttempts": [],
+ "maximumInterfaceSpeed": []}})
+
+ def test_update_pass(self):
+ """Verify update method behavior."""
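+        # No change required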
+ self._set_args({"address": "192.168.100.100"})
+ iface = NetAppESeriesIbIserInterface()
+ iface.is_change_required = lambda: False
+ with self.assertRaisesRegexp(AnsibleExitJson, "No changes were required."):
+ iface.update()
+
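+        # Change required but skipped because check mode is enabled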
+ self._set_args({"address": "192.168.100.100"})
+ iface = NetAppESeriesIbIserInterface()
+ iface.is_change_required = lambda: True
+ iface.check_mode = True
+ with self.assertRaisesRegexp(AnsibleExitJson, "No changes were required."):
+ iface.update()
+
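+        # Change required and applied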
+ self._set_args({"address": "192.168.100.100"})
+ iface = NetAppESeriesIbIserInterface()
+ iface.is_change_required = lambda: True
+ iface.make_request_body = lambda: {}
+ with self.assertRaisesRegexp(AnsibleExitJson, "The interface settings have been updated."):
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ iface.update()
+
+ def test_update_fail(self):
+ """Verify exceptions are thrown."""
+ self._set_args({"address": "192.168.100.100"})
+ iface = NetAppESeriesIbIserInterface()
+ iface.is_change_required = lambda: True
+ iface.make_request_body = lambda: {}
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to modify the interface!"):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ iface.update()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_iscsi_interface.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_iscsi_interface.py
new file mode 100644
index 000000000..de9617e6d
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_iscsi_interface.py
@@ -0,0 +1,239 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_iscsi_interface import NetAppESeriesIscsiInterface
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class IscsiInterfaceTest(ModuleTestCase):
+ REQUIRED_PARAMS = {
+ 'api_username': 'rw',
+ 'api_password': 'password',
+ 'api_url': 'http://localhost',
+ 'ssid': '1',
+ 'state': 'disabled',
+ 'port': 1,
+ 'controller': 'A',
+ }
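+    # REQ_FUNC is the dotted path to this module's request method; the tests patch it with
+    # mock.patch so no live SANtricity web services API is contacted.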
+ REQ_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_iscsi_interface.NetAppESeriesIscsiInterface.request'
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_validate_params(self):
+ """Ensure we can pass valid parameters to the module"""
+ # Provide a range of valid values for each
+ for controller in ['A', 'B']:
+ for i in range(1, 10):
+ for mtu in [1500, 2500, 9000]:
+ self._set_args(dict(
+ state='disabled',
+ port=i,
+ controller=controller,
+ mtu=mtu,
+ ))
+ iface = NetAppESeriesIscsiInterface()
+
+ def test_invalid_params(self):
+ """Ensure that our input validation catches invalid parameters"""
+
+ # Currently a 'C' controller is invalid
+ self._set_args(dict(
+ state='disabled',
+ port=1,
+ controller="C",
+ ))
+ with self.assertRaises(AnsibleFailJson) as result:
+ iface = NetAppESeriesIscsiInterface()
+
+ # Each of these mtu values are invalid
+ for mtu in [500, 1499, 9001]:
+ self._set_args({
+ 'state': 'disabled',
+ 'port': 1,
+ 'controller': 'A',
+ 'mtu': mtu
+ })
+ with self.assertRaises(AnsibleFailJson) as result:
+ iface = NetAppESeriesIscsiInterface()
+
+ def test_interfaces(self):
+ """Validate that we are processing the interface list properly"""
+ self._set_args()
+ interfaces = [{"interfaceType": "iscsi", "iscsi": {"interfaceData": {"type": "ethernet"}}},
+ {"interfaceType": "iscsi", "iscsi": {"interfaceData": {"type": "ethernet"}}},
+ {"interfaceType": "fc", "iscsi": {"interfaceData": {"type": "ethernet"}}}]
+
+ # Ensure we filter out anything without an interfaceType of iscsi
+ expected = [iface for iface in interfaces if iface['interfaceType'] == 'iscsi']
+
+ # We expect a single call to the API: retrieve the list of interfaces from the objectGraph.
+ with mock.patch(self.REQ_FUNC, return_value=(200, interfaces)):
+ iface = NetAppESeriesIscsiInterface()
+ interfaces = iface.interfaces
+ self.assertEquals(interfaces, expected)
+
+ def test_interfaces_fail(self):
+ """Ensure we fail gracefully on an error to retrieve the interfaces"""
+ self._set_args()
+
+ with self.assertRaises(AnsibleFailJson) as result:
+ # Simulate a failed call to the API
+ with mock.patch(self.REQ_FUNC, side_effect=Exception("Failure")):
+ iface = NetAppESeriesIscsiInterface()
+ interfaces = iface.interfaces
+
+ def test_get_target_interface_bad_port(self):
+ """Ensure we fail correctly when a bad port is provided"""
+ self._set_args()
+
+ interfaces = [{"iscsi": {"port": 1, "controllerId": "1"}}]
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Invalid controller.*?iSCSI port."):
+ with mock.patch.object(NetAppESeriesIscsiInterface, 'interfaces', return_value=interfaces):
+ iface = NetAppESeriesIscsiInterface()
+ interfaces = iface.get_target_interface()
+
+ def test_make_update_body_dhcp(self):
+ """Ensure the update body generates correctly for a transition from static to dhcp"""
+ self._set_args(dict(state='enabled',
+ config_method='dhcp')
+ )
+
+ iface = {"iscsi": {"id": 1,
+ "ipv4Enabled": False,
+ "ipv4Data": {"ipv4AddressData": {"ipv4Address": "0.0.0.0",
+ "ipv4SubnetMask": "0.0.0.0",
+ "ipv4GatewayAddress": "0.0.0.0"},
+ "ipv4AddressConfigMethod": "configStatic"},
+ "interfaceData": {"ethernetData": {"maximumFramePayloadSize": 1500}}}}
+
+ # Test a transition from static to dhcp
+ inst = NetAppESeriesIscsiInterface()
+ update, body = inst.make_update_body(iface)
+ self.assertTrue(update, msg="An update was expected!")
+ self.assertEquals(body['settings']['ipv4Enabled'][0], True)
+ self.assertEquals(body['settings']['ipv4AddressConfigMethod'][0], 'configDhcp')
+
+ def test_make_update_body_static(self):
+ """Ensure the update body generates correctly for a transition from dhcp to static"""
+ iface = {"iscsi": {"id": 1,
+ "ipv4Enabled": False,
+ "ipv4Data": {"ipv4AddressData": {"ipv4Address": "0.0.0.0",
+ "ipv4SubnetMask": "0.0.0.0",
+ "ipv4GatewayAddress": "0.0.0.0"},
+ "ipv4AddressConfigMethod": "configDhcp"},
+ "interfaceData": {"ethernetData": {"maximumFramePayloadSize": 1500}}}}
+
+ self._set_args(dict(state='enabled',
+ config_method='static',
+ address='10.10.10.10',
+ subnet_mask='255.255.255.0',
+ gateway='1.1.1.1'))
+
+ inst = NetAppESeriesIscsiInterface()
+ update, body = inst.make_update_body(iface)
+ self.assertTrue(update, msg="An update was expected!")
+ self.assertEquals(body['settings']['ipv4Enabled'][0], True)
+ self.assertEquals(body['settings']['ipv4AddressConfigMethod'][0], 'configStatic')
+ self.assertEquals(body['settings']['ipv4Address'][0], '10.10.10.10')
+ self.assertEquals(body['settings']['ipv4SubnetMask'][0], '255.255.255.0')
+ self.assertEquals(body['settings']['ipv4GatewayAddress'][0], '1.1.1.1')
+
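+    # CONTROLLERS maps the module's controller labels to controller references; the decorated
+    # tests below return it from a mocked get_controllers so update() can resolve controllers A and B.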
+ CONTROLLERS = dict(A='1', B='2')
+
+ def test_update_bad_controller(self):
+ """Ensure a bad controller fails gracefully"""
+ self._set_args(dict(controller='B'))
+
+ inst = NetAppESeriesIscsiInterface()
+ with self.assertRaises(AnsibleFailJson) as result:
+ with mock.patch.object(inst, 'get_controllers', return_value=dict(A='1')) as get_controllers:
+ inst.update()
+
+ @mock.patch.object(NetAppESeriesIscsiInterface, 'get_controllers', return_value=CONTROLLERS)
+ def test_update(self, get_controllers):
+ """Validate the good path"""
+ self._set_args()
+
+ inst = NetAppESeriesIscsiInterface()
+ with self.assertRaises(AnsibleExitJson):
+ with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
+ with mock.patch.object(inst, 'get_target_interface', side_effect=[{}, mock.MagicMock()]):
+ with mock.patch.object(inst, 'make_update_body', return_value=(True, {})):
+ inst.update()
+ request.assert_called_once()
+
+ @mock.patch.object(NetAppESeriesIscsiInterface, 'get_controllers', return_value=CONTROLLERS)
+ def test_update_not_required(self, get_controllers):
+ """Ensure we don't trigger the update if one isn't required or if check mode is enabled"""
+ self._set_args()
+
+ # make_update_body will report that no change is required, so we should see no call to the API.
+ inst = NetAppESeriesIscsiInterface()
+ with self.assertRaises(AnsibleExitJson) as result:
+ with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
+ with mock.patch.object(inst, 'get_target_interface', side_effect=[{}, mock.MagicMock()]):
+ with mock.patch.object(inst, 'make_update_body', return_value=(False, {})):
+ inst.update()
+ request.assert_not_called()
+ self.assertFalse(result.exception.args[0]['changed'], msg="No change was expected.")
+
+ # Since check_mode is enabled, we will run everything normally, but not make a request to the API
+ # to perform the actual change.
+ inst = NetAppESeriesIscsiInterface()
+ inst.check_mode = True
+ with self.assertRaises(AnsibleExitJson) as result:
+ with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
+ with mock.patch.object(inst, 'get_target_interface', side_effect=[{}, mock.MagicMock()]):
+ with mock.patch.object(inst, 'make_update_body', return_value=(True, {})):
+ inst.update()
+ request.assert_not_called()
+ self.assertTrue(result.exception.args[0]['changed'], msg="A change was expected.")
+
+ @mock.patch.object(NetAppESeriesIscsiInterface, 'get_controllers', return_value=CONTROLLERS)
+ def test_update_fail_busy(self, get_controllers):
+ """Ensure we fail correctly on receiving a busy response from the API."""
+ self._set_args()
+
+ inst = NetAppESeriesIscsiInterface()
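+        # The mocked 422 response carrying retcode "3" simulates a busy storage system, so the
+        # resulting failure message is expected to mention "busy".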
+ with self.assertRaisesRegexp(AnsibleFailJson, r".*?busy.*") as result:
+ with mock.patch(self.REQ_FUNC, return_value=(422, dict(retcode="3"))) as request:
+ with mock.patch.object(inst, 'get_target_interface', side_effect=[{}, mock.MagicMock()]):
+ with mock.patch.object(inst, 'make_update_body', return_value=(True, {})):
+ inst.update()
+ request.assert_called_once()
+
+ @mock.patch.object(NetAppESeriesIscsiInterface, 'get_controllers', return_value=CONTROLLERS)
+ @mock.patch.object(NetAppESeriesIscsiInterface, 'make_update_body', return_value=(True, {}))
+ def test_update_fail(self, get_controllers, make_body):
+ """Ensure we fail correctly on receiving a normal failure from the API."""
+ self._set_args()
+
+ inst = NetAppESeriesIscsiInterface()
+ # Test a 422 error with a non-busy status
+ with self.assertRaisesRegexp(AnsibleFailJson, r".*?Failed to modify.*") as result:
+ with mock.patch(self.REQ_FUNC, return_value=(422, mock.MagicMock())) as request:
+ with mock.patch.object(inst, 'get_target_interface', side_effect=[{}, mock.MagicMock()]):
+ inst.update()
+ request.assert_called_once()
+
+ # Test a 401 (authentication) error
+ with self.assertRaisesRegexp(AnsibleFailJson, r".*?Failed to modify.*") as result:
+ with mock.patch(self.REQ_FUNC, return_value=(401, mock.MagicMock())) as request:
+ with mock.patch.object(inst, 'get_target_interface', side_effect=[{}, mock.MagicMock()]):
+ inst.update()
+ request.assert_called_once()
+
+ # Test with a connection failure
+ with self.assertRaisesRegexp(AnsibleFailJson, r".*?Connection failure.*") as result:
+ with mock.patch(self.REQ_FUNC, side_effect=Exception()) as request:
+ with mock.patch.object(inst, 'get_target_interface', side_effect=[{}, mock.MagicMock()]):
+ inst.update()
+ request.assert_called_once()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_iscsi_target.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_iscsi_target.py
new file mode 100644
index 000000000..93ccafe47
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_iscsi_target.py
@@ -0,0 +1,188 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_iscsi_target import NetAppESeriesIscsiTarget
+from units.modules.utils import AnsibleFailJson, AnsibleExitJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class IscsiTargetTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"api_username": "admin", "api_password": "adminpassword", "api_url": "http://localhost", "ssid": "1", "name": "abc"}
+ CHAP_SAMPLE = "a" * 14
+ REQ_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_iscsi_target.NetAppESeriesIscsiTarget.request"
+ TARGET_REQUEST_RESPONSE = [{"targetRef": "90000000600A098000A4B28D00334A065DA9D747",
+ "nodeName": {"ioInterfaceType": "iscsi",
+ "iscsiNodeName": "iqn.1992-08.com.netapp:2806.600a098000a4b28d000000005da9d744",
+ "remoteNodeWWN": None, "nvmeNodeName": None},
+ "alias": {"ioInterfaceType": "iscsi",
+ "iscsiAlias": "target_name"},
+ "configuredAuthMethods": {"authMethodData": [{"authMethod": "none",
+ "chapSecret": None}]},
+ "portals": [{"groupTag": 2,
+ "ipAddress": {"addressType": "ipv4",
+ "ipv4Address": "10.10.10.110",
+ "ipv6Address": None},
+ "tcpListenPort": 3260},
+ {"groupTag": 2,
+ "ipAddress": {"addressType": "ipv6",
+ "ipv4Address": None,
+ "ipv6Address": "FE8000000000000002A098FFFEA4B9D7"},
+ "tcpListenPort": 3260},
+ {"groupTag": 2,
+ "ipAddress": {"addressType": "ipv4",
+ "ipv4Address": "10.10.10.112",
+ "ipv6Address": None},
+ "tcpListenPort": 3260},
+ {"groupTag": 1, "ipAddress": {"addressType": "ipv4",
+ "ipv4Address": "10.10.11.110",
+ "ipv6Address": None},
+ "tcpListenPort": 3260},
+ {"groupTag": 1,
+ "ipAddress": {"addressType": "ipv6",
+ "ipv4Address": None,
+ "ipv6Address": "FE8000000000000002A098FFFEA4B293"},
+ "tcpListenPort": 3260},
+ {"groupTag": 1,
+ "ipAddress": {"addressType": "ipv4",
+ "ipv4Address": "10.10.11.112",
+ "ipv6Address": None},
+ "tcpListenPort": 3260}]}]
+ ISCSI_ENTRY_DATA_RESPONSE = [{"icmpPingResponseEnabled": False,
+ "unnamedDiscoverySessionsEnabled": False,
+ "isnsServerTcpListenPort": 0,
+ "ipv4IsnsServerAddressConfigMethod": "configDhcp",
+ "ipv4IsnsServerAddress": "0.0.0.0",
+ "ipv6IsnsServerAddressConfigMethod": "configStatic",
+ "ipv6IsnsServerAddress": "00000000000000000000000000000000",
+ "isnsRegistrationState": "__UNDEFINED",
+ "isnsServerRegistrationEnabled": False,
+ "hostPortsConfiguredDHCP": False}]
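+    # TARGET_REQUEST_RESPONSE and ISCSI_ENTRY_DATA_RESPONSE mimic the payloads returned for the
+    # iSCSI target and iSCSI entity lookups; tests supply them to the mocked request in that order
+    # through side_effect.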
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_validate_params(self):
+ """Ensure we can pass valid parameters to the module"""
+ for i in range(12, 57):
+ secret = 'a' * i
+ self._set_args(dict(chap=secret))
+ tgt = NetAppESeriesIscsiTarget()
+
+ def test_invalid_chap_secret(self):
+ for secret in [11 * 'a', 58 * 'a']:
+ with self.assertRaisesRegexp(AnsibleFailJson, r'.*?CHAP secret is not valid.*') as result:
+ self._set_args(dict(chap=secret))
+ tgt = NetAppESeriesIscsiTarget()
+
+ def test_target_pass(self):
+ """Ensure target property returns the expected data structure."""
+ expected_response = {"alias": "target_name", "chap": False, "iqn": "iqn.1992-08.com.netapp:2806.600a098000a4b28d000000005da9d744",
+ "ping": False, "unnamed_discovery": False}
+
+ self._set_args({"name": "target_name", "ping": True, "unnamed_discovery": True})
+ iscsi_target = NetAppESeriesIscsiTarget()
+
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), (200, self.ISCSI_ENTRY_DATA_RESPONSE)]):
+ self.assertEquals(iscsi_target.target, expected_response)
+
+ def test_target_fail(self):
+ """Ensure target property returns the expected data structure."""
+ self._set_args({"name": "target_name", "ping": True, "unnamed_discovery": True})
+ iscsi_target = NetAppESeriesIscsiTarget()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve the iSCSI target information."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ result = iscsi_target.target
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve the iSCSI target information."):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), Exception()]):
+ result = iscsi_target.target
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"This storage-system does not appear to have iSCSI interfaces."):
+ with mock.patch(self.REQ_FUNC, return_value=(200, [])):
+ result = iscsi_target.target
+
+ def test_apply_iscsi_settings_pass(self):
+ """Ensure apply_iscsi_settings succeeds properly."""
+ self._set_args({"name": "not_target_name"})
+ iscsi_target = NetAppESeriesIscsiTarget()
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), (200, self.ISCSI_ENTRY_DATA_RESPONSE), (200, [])]):
+ self.assertTrue(iscsi_target.apply_iscsi_settings())
+
+ self._set_args({"name": "target_name"})
+ iscsi_target = NetAppESeriesIscsiTarget()
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), (200, self.ISCSI_ENTRY_DATA_RESPONSE), (200, [])]):
+ self.assertFalse(iscsi_target.apply_iscsi_settings())
+
+ def test_apply_iscsi_settings_fail(self):
+ """Ensure apply_iscsi_settings fails properly."""
+ self._set_args({"name": "not_target_name"})
+ iscsi_target = NetAppESeriesIscsiTarget()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to update the iSCSI target settings."):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), (200, self.ISCSI_ENTRY_DATA_RESPONSE), Exception()]):
+ self.assertTrue(iscsi_target.apply_iscsi_settings())
+
+ def test_apply_target_changes_pass(self):
+ """Ensure apply_iscsi_settings succeeds properly."""
+ self._set_args({"name": "target_name", "ping": True, "unnamed_discovery": True})
+ iscsi_target = NetAppESeriesIscsiTarget()
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), (200, self.ISCSI_ENTRY_DATA_RESPONSE), (200, [])]):
+ self.assertTrue(iscsi_target.apply_target_changes())
+
+ self._set_args({"name": "target_name", "ping": False, "unnamed_discovery": True})
+ iscsi_target = NetAppESeriesIscsiTarget()
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), (200, self.ISCSI_ENTRY_DATA_RESPONSE), (200, [])]):
+ self.assertTrue(iscsi_target.apply_target_changes())
+
+ self._set_args({"name": "target_name", "ping": True, "unnamed_discovery": False})
+ iscsi_target = NetAppESeriesIscsiTarget()
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), (200, self.ISCSI_ENTRY_DATA_RESPONSE), (200, [])]):
+ self.assertTrue(iscsi_target.apply_target_changes())
+
+ self._set_args({"name": "target_name", "ping": False, "unnamed_discovery": False})
+ iscsi_target = NetAppESeriesIscsiTarget()
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), (200, self.ISCSI_ENTRY_DATA_RESPONSE), (200, [])]):
+ self.assertFalse(iscsi_target.apply_target_changes())
+
+ def test_apply_target_changes_fail(self):
+ """Ensure apply_iscsi_settings fails properly."""
+ self._set_args({"name": "target_name", "ping": True, "unnamed_discovery": True})
+ iscsi_target = NetAppESeriesIscsiTarget()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to update the iSCSI target settings."):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), (200, self.ISCSI_ENTRY_DATA_RESPONSE), Exception()]):
+ iscsi_target.apply_target_changes()
+
+ def test_update_pass(self):
+ """Ensure update successfully exists."""
+ self._set_args({"name": "target_name", "ping": True, "unnamed_discovery": True})
+ iscsi_target = NetAppESeriesIscsiTarget()
+
+ iscsi_target.apply_iscsi_settings = lambda: True
+ iscsi_target.apply_target_changes = lambda: True
+ with self.assertRaisesRegexp(AnsibleExitJson, r"\'changed\': True"):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), (200, self.ISCSI_ENTRY_DATA_RESPONSE)]):
+ iscsi_target.update()
+
+ iscsi_target.apply_iscsi_settings = lambda: False
+ iscsi_target.apply_target_changes = lambda: True
+ with self.assertRaisesRegexp(AnsibleExitJson, r"\'changed\': True"):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), (200, self.ISCSI_ENTRY_DATA_RESPONSE)]):
+ iscsi_target.update()
+
+ iscsi_target.apply_iscsi_settings = lambda: True
+ iscsi_target.apply_target_changes = lambda: False
+ with self.assertRaisesRegexp(AnsibleExitJson, r"\'changed\': True"):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), (200, self.ISCSI_ENTRY_DATA_RESPONSE)]):
+ iscsi_target.update()
+
+ iscsi_target.apply_iscsi_settings = lambda: False
+ iscsi_target.apply_target_changes = lambda: False
+ with self.assertRaisesRegexp(AnsibleExitJson, r"\'changed\': False"):
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TARGET_REQUEST_RESPONSE), (200, self.ISCSI_ENTRY_DATA_RESPONSE)]):
+ iscsi_target.update()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_ldap.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_ldap.py
new file mode 100644
index 000000000..69bf26742
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_ldap.py
@@ -0,0 +1,371 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_ldap import NetAppESeriesLdap
+from units.modules.utils import ModuleTestCase, set_module_args, AnsibleFailJson, AnsibleExitJson
+from units.compat import mock
+
+
+class LdapTest(ModuleTestCase):
+ REQUIRED_PARAMS = {
+ "api_username": "admin",
+ "api_password": "password",
+ "api_url": "http://localhost",
+ "ssid": "1"}
+ REQ_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_ldap.NetAppESeriesLdap.request"
+ BASE_REQ_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.module_utils.santricity.request"
+
+ GET_DOMAINS = {"version": "3",
+ "ldapDomains": [{"id": "test1",
+ "bindLookupUser": {"password": "***", "user": "CN=cn,OU=accounts,DC=test1,DC=example,DC=com"},
+ "groupAttributes": ["memberOf"],
+ "ldapUrl": "ldap://test.example.com:389",
+ "names": ["test.example.com"],
+ "roleMapCollection": [{"groupRegex": ".*", "ignoreCase": False, "name": "storage.monitor"}],
+ "searchBase": "OU=accounts,DC=test,DC=example,DC=com",
+ "userAttribute": "sAMAccountName"},
+ {"id": "test2",
+ "bindLookupUser": {"password": "***", "user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com"},
+ "groupAttributes": ["memberOf"],
+ "ldapUrl": "ldap://test2.example.com:389",
+ "names": ["test2.example.com"],
+ "roleMapCollection": [{"groupRegex": ".*", "ignoreCase": False, "name": "storage.admin"},
+ {"groupRegex": ".*", "ignoreCase": False, "name": "support.admin"},
+ {"groupRegex": ".*", "ignoreCase": False, "name": "security.admin"},
+ {"groupRegex": ".*", "ignoreCase": False, "name": "storage.monitor"}],
+ "searchBase": "OU=accounts,DC=test2,DC=example,DC=com",
+ "userAttribute": "sAMAccountName"}]}
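+    # GET_DOMAINS mimics the LDAP configuration endpoint response with two existing domains
+    # ("test1" and "test2"); the change-detection tests compare the module options against it.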
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_valid_options_pass(self):
+ """Verify valid options."""
+ options_list = [{"state": "disabled"},
+ {"state": "absent", "identifier": "test_domain"},
+ {"state": "present", "identifier": "test_domain", "server_url": "ldap://test.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com"},
+ {"state": "present", "identifier": "test_domain", "server_url": "ldap://test.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com", "bind_user": "admin", "bind_password": "adminpass"},
+ {"state": "present", "identifier": "test_domain", "server_url": "ldap://test.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com", "bind_user": "admin", "bind_password": "adminpass",
+ "names": ["name1", "name2"], "group_attributes": ["group_attr1", "group_attr1"], "user_attribute": "user_attr"}]
+
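+        # Each option set is validated twice: once with web services reporting runningAsProxy True
+        # (proxy) and once with False (embedded); the mocked BASE_REQ_FUNC supplies the version and
+        # runningAsProxy lookups made during instantiation.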
+ for options in options_list:
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ self._set_args(options)
+ ldap = NetAppESeriesLdap()
+ for options in options_list:
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": False})]):
+ self._set_args(options)
+ ldap = NetAppESeriesLdap()
+
+ def test_get_domain_pass(self):
+ """Verify get_domain returns expected data structure."""
+ options = {"state": "present", "identifier": "test_domain", "server_url": "ldap://test.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com", "bind_user": "admin", "bind_password": "adminpass",
+ "names": ["name1", "name2"], "group_attributes": ["group_attr1", "group_attr1"], "user_attribute": "user_attr"}
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.GET_DOMAINS)):
+ self._set_args(options)
+ ldap = NetAppESeriesLdap()
+ self.assertEquals(ldap.get_domains(), self.GET_DOMAINS["ldapDomains"])
+
+ def test_get_domain_fail(self):
+ """Verify get_domain throws expected exceptions."""
+ options = {"state": "present", "identifier": "test_domain", "server_url": "ldap://test.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com", "bind_user": "admin", "bind_password": "adminpass",
+ "names": ["name1", "name2"], "group_attributes": ["group_attr1", "group_attr1"], "user_attribute": "user_attr"}
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve current LDAP configuration."):
+ self._set_args(options)
+ ldap = NetAppESeriesLdap()
+ ldap.get_domains()
+
+ def test_build_request_body_pass(self):
+ """Verify build_request_body builds expected data structure."""
+ options_list = [{"state": "present", "identifier": "test_domain", "server_url": "ldap://test.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com"},
+ {"state": "present", "identifier": "test_domain", "server_url": "ldap://test.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com", "bind_user": "admin", "bind_password": "adminpass"},
+ {"state": "present", "identifier": "test_domain", "server_url": "ldap://test.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com", "bind_user": "admin", "bind_password": "adminpass",
+ "names": ["name1", "name2"], "group_attributes": ["group_attr1", "group_attr1"], "user_attribute": "user_attr"}]
+ expectation_list = [{'id': 'test_domain', 'groupAttributes': ['memberOf'], 'ldapUrl': 'ldap://test.example.com:389', 'names': ['test.example.com'],
+ 'roleMapCollection': [], 'searchBase': 'ou=accounts,DC=test,DC=example,DC=com', 'userAttribute': 'sAMAccountName'},
+ {'id': 'test_domain', 'groupAttributes': ['memberOf'], 'ldapUrl': 'ldap://test.example.com:389', 'names': ['test.example.com'],
+ 'roleMapCollection': [], 'searchBase': 'ou=accounts,DC=test,DC=example,DC=com', 'userAttribute': 'sAMAccountName',
+ 'bindLookupUser': {'password': 'adminpass', 'user': 'admin'}},
+ {'id': 'test_domain', 'groupAttributes': ['group_attr1', 'group_attr1'], 'ldapUrl': 'ldap://test.example.com:389',
+ 'names': ['name1', 'name2'], 'roleMapCollection': [], 'searchBase': 'ou=accounts,DC=test,DC=example,DC=com',
+ 'userAttribute': 'user_attr', 'bindLookupUser': {'password': 'adminpass', 'user': 'admin'}}]
+ for index in range(len(options_list)):
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ self._set_args(options_list[index])
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body()
+ self.assertEquals(ldap.body, expectation_list[index])
+
+ def test_are_changes_required_pass(self):
+ """Verify build_request_body builds expected data structure."""
+ options_list = [{"state": "present", "identifier": "test_domain", "server_url": "ldap://test.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com"},
+ {"state": "present", "identifier": "test_domain", "server_url": "ldap://test.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com", "bind_user": "admin", "bind_password": "adminpass"},
+ {"state": "present", "identifier": "test_domain", "server_url": "ldap://test.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com", "bind_user": "admin", "bind_password": "adminpass",
+ "names": ["name1", "name2"], "group_attributes": ["group_attr1", "group_attr1"], "user_attribute": "user_attr"}]
+
+ for index in range(len(options_list)):
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ self._set_args(options_list[index])
+ ldap = NetAppESeriesLdap()
+ ldap.get_domains = lambda: self.GET_DOMAINS["ldapDomains"]
+ self.assertTrue(ldap.are_changes_required())
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ self._set_args({"state": "disabled"})
+ ldap = NetAppESeriesLdap()
+ ldap.get_domains = lambda: self.GET_DOMAINS["ldapDomains"]
+ self.assertTrue(ldap.are_changes_required())
+ self.assertEquals(ldap.existing_domain_ids, ["test1", "test2"])
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ self._set_args({"state": "absent", "identifier": "test_domain"})
+ ldap = NetAppESeriesLdap()
+ ldap.get_domains = lambda: self.GET_DOMAINS["ldapDomains"]
+ self.assertFalse(ldap.are_changes_required())
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test2,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body()
+ ldap.get_domains = lambda: self.GET_DOMAINS["ldapDomains"]
+ ldap.add_domain = lambda temporary, skip_test: {"id": "ANSIBLE_TMP_DOMAIN"}
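+            # add_domain is stubbed to return a temporary domain entry; are_changes_required then
+            # consults the (mocked) bind-credential test results for both the existing and the
+            # temporary domain before deciding whether an update is needed.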
+
+ with mock.patch(self.REQ_FUNC, return_value=(200, [{"id": "test2", "result": {"authenticationTestResult": "ok"}},
+ {"id": "ANSIBLE_TMP_DOMAIN", "result": {"authenticationTestResult": "ok"}}])):
+ self.assertFalse(ldap.are_changes_required())
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body()
+ ldap.get_domains = lambda: self.GET_DOMAINS["ldapDomains"]
+ ldap.add_domain = lambda temporary, skip_test: {"id": "ANSIBLE_TMP_DOMAIN"}
+
+ with mock.patch(self.REQ_FUNC, return_value=(200, [{"id": "test2", "result": {"authenticationTestResult": "fail"}},
+ {"id": "ANSIBLE_TMP_DOMAIN", "result": {"authenticationTestResult": "ok"}}])):
+ self.assertTrue(ldap.are_changes_required())
+
+ def test_are_changes_required_fail(self):
+ """Verify are_changes_required throws expected exception."""
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test2,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body()
+ ldap.get_domains = lambda: self.GET_DOMAINS["ldapDomains"]
+ ldap.add_domain = lambda temporary, skip_test: {"id": "ANSIBLE_TMP_DOMAIN"}
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to authenticate bind credentials!"):
+ with mock.patch(self.REQ_FUNC, return_value=(200, [{"id": "test2", "result": {"authenticationTestResult": "fail"}},
+ {"id": "ANSIBLE_TMP_DOMAIN", "result": {"authenticationTestResult": "fail"}}])):
+ ldap.are_changes_required()
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test2,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body()
+ ldap.get_domains = lambda: self.GET_DOMAINS["ldapDomains"]
+ ldap.add_domain = lambda temporary, skip_test: {"id": "ANSIBLE_TMP_DOMAIN"}
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to authenticate bind credentials!"):
+ with mock.patch(self.REQ_FUNC, return_value=(200, [{"id": "test2", "result": {"authenticationTestResult": "ok"}},
+ {"id": "ANSIBLE_TMP_DOMAIN", "result": {"authenticationTestResult": "fail"}}])):
+ ldap.are_changes_required()
+
+ def test_add_domain_pass(self):
+ """Verify add_domain returns expected data."""
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body()
+ with mock.patch(self.REQ_FUNC, return_value=(200, {"ldapDomains": [{"id": "test2"}]})):
+ self.assertEquals(ldap.add_domain(), {"id": "test2"})
+
+ def test_add_domain_fail(self):
+ """Verify add_domain returns expected data."""
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to create LDAP domain."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ ldap.add_domain()
+
+ def test_update_domain_pass(self):
+ """Verify update_domain returns expected data."""
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body()
+ ldap.domain = {"id": "test2"}
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ ldap.update_domain()
+
+ def test_update_domain_fail(self):
+ """Verify update_domain returns expected data."""
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body()
+ ldap.domain = {"id": "test2"}
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to update LDAP domain."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ ldap.update_domain()
+
+ def test_delete_domain_pass(self):
+ """Verify delete_domain returns expected data."""
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ ldap = NetAppESeriesLdap()
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ ldap.delete_domain("test2")
+
+ def test_delete_domain_fail(self):
+ """Verify delete_domain returns expected data."""
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ ldap = NetAppESeriesLdap()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to delete LDAP domain."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ ldap.delete_domain("test2")
+
+ def test_disable_domains_pass(self):
+ """Verify disable_domains completes successfully."""
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ ldap = NetAppESeriesLdap()
+ ldap.delete_domain = lambda x: None
+ ldap.existing_domain_ids = ["id1", "id2", "id3"]
+ ldap.disable_domains()
+
+ def test_apply_pass(self):
+ """Verify apply exits as expected."""
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body = lambda: None
+ ldap.are_changes_required = lambda: False
+ with self.assertRaisesRegexp(AnsibleExitJson, "No changes have been made to the LDAP configuration."):
+ ldap.apply()
+
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body = lambda: None
+ ldap.are_changes_required = lambda: True
+ ldap.add_domain = lambda: None
+ ldap.domain = {}
+ with self.assertRaisesRegexp(AnsibleExitJson, "LDAP domain has been added."):
+ ldap.apply()
+
+ self._set_args({"state": "present", "identifier": "test2", "server_url": "ldap://test2.example.com:389",
+ "search_base": "ou=accounts,DC=test,DC=example,DC=com",
+ "bind_user": "CN=cn,OU=accounts,DC=test2,DC=example,DC=com", "bind_password": "adminpass",
+ "role_mappings": {".*": ["storage.admin", "support.admin", "security.admin", "storage.monitor"]},
+ "names": ["test2.example.com"], "group_attributes": ["memberOf"], "user_attribute": "sAMAccountName"})
+
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body = lambda: None
+ ldap.are_changes_required = lambda: True
+ ldap.update_domain = lambda: None
+ ldap.domain = {"id": "test"}
+ with self.assertRaisesRegexp(AnsibleExitJson, "LDAP domain has been updated."):
+ ldap.apply()
+
+ self._set_args({"state": "absent", "identifier": "test2"})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body = lambda: None
+ ldap.are_changes_required = lambda: True
+ ldap.delete_domain = lambda x: None
+ ldap.domain = {"id": "test"}
+ with self.assertRaisesRegexp(AnsibleExitJson, "LDAP domain has been removed."):
+ ldap.apply()
+
+ self._set_args({"state": "disabled"})
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.10.0000.0001"}), (200, {"runningAsProxy": True})]):
+ ldap = NetAppESeriesLdap()
+ ldap.build_request_body = lambda: None
+ ldap.are_changes_required = lambda: True
+ ldap.disable_domain = lambda: None
+ ldap.domain = {"id": "test"}
+ with self.assertRaisesRegexp(AnsibleExitJson, "All LDAP domains have been removed."):
+ ldap.apply()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_lun_mapping.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_lun_mapping.py
new file mode 100644
index 000000000..ed44e0de2
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_lun_mapping.py
@@ -0,0 +1,196 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_lun_mapping import NetAppESeriesLunMapping
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class NetAppLunMappingTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"api_username": "rw",
+ "api_password": "password",
+ "api_url": "http://localhost",
+ "ssid": "1"}
+
+ REQ_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_lun_mapping.NetAppESeriesLunMapping.request"
+ GRAPH_RESPONSE = {"storagePoolBundle": {"host": [{"name": "host1", "hostRef": "1"},
+ {"name": "host2", "hostRef": "2"},
+ {"name": "host3", "hostRef": "3"}],
+ "cluster": [{"name": "hostgroup1", "clusterRef": "10"},
+ {"name": "hostgroup2", "clusterRef": "20"},
+ {"name": "hostgroup3", "clusterRef": "30"}],
+ "lunMapping": [{"volumeRef": "100", "mapRef": "1", "lunMappingRef": "100001", "lun": 5},
+ {"volumeRef": "200", "mapRef": "2", "lunMappingRef": "200001", "lun": 3},
+ {"volumeRef": "1000", "mapRef": "10", "lunMappingRef": "300001", "lun": 6},
+ {"volumeRef": "2000", "mapRef": "20", "lunMappingRef": "400001", "lun": 4}]},
+ "volume": [{"name": "volume1", "volumeRef": "100", "listOfMappings": [{"lun": 5}]},
+ {"name": "volume2", "volumeRef": "200", "listOfMappings": [{"lun": 3}]},
+ {"name": "volume3", "volumeRef": "300", "listOfMappings": []}],
+ "highLevelVolBundle": {"thinVolume": [{"name": "thin_volume1", "volumeRef": "1000", "listOfMappings": [{"lun": 6}]},
+ {"name": "thin_volume2", "volumeRef": "2000", "listOfMappings": [{"lun": 4}]},
+ {"name": "thin_volume3", "volumeRef": "3000", "listOfMappings": []}]},
+ "sa": {"accessVolume": {"name": "access_volume", "accessVolumeRef": "10000"}}}
+ MAPPING_INFO = {"lun_mapping": [{"volume_reference": "100", "map_reference": "1", "lun_mapping_reference": "100001", "lun": 5},
+ {"volume_reference": "200", "map_reference": "2", "lun_mapping_reference": "200001", "lun": 3},
+ {"volume_reference": "1000", "map_reference": "10", "lun_mapping_reference": "300001", "lun": 6},
+ {"volume_reference": "2000", "map_reference": "20", "lun_mapping_reference": "400001", "lun": 4}],
+ "volume_by_reference": {"100": "volume1", "200": "volume2", "300": "volume3", "1000": "thin_volume1", "2000": "thin_volume2",
+ "3000": "thin_volume3", "10000": "access_volume"},
+ "volume_by_name": {"volume1": "100", "volume2": "200", "volume3": "300", "thin_volume1": "1000", "thin_volume2": "2000",
+ "thin_volume3": "3000", "access_volume": "10000"},
+ "lun_by_name": {"volume1": 5, "volume2": 3, "thin_volume1": 6, "thin_volume2": 4},
+ "target_by_reference": {"1": "host1", "2": "host2", "3": "host3", "10": "hostgroup1", "20": "hostgroup2", "30": "hostgroup3",
+ "0000000000000000000000000000000000000000": "DEFAULT_HOSTGROUP"},
+ "target_by_name": {"host1": "1", "host2": "2", "host3": "3", "hostgroup1": "10", "hostgroup2": "20", "hostgroup3": "30",
+ "DEFAULT_HOSTGROUP": "0000000000000000000000000000000000000000"},
+ "target_type_by_name": {"host1": "host", "host2": "host", "host3": "host", "hostgroup1": "group", "hostgroup2": "group",
+ "hostgroup3": "group", "DEFAULT_HOSTGROUP": "group"}}
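+    # MAPPING_INFO is the parsed structure update_mapping_info is expected to derive from
+    # GRAPH_RESPONSE; most tests stub update_mapping_info and assign this directly to isolate the
+    # logic under test.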
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_update_mapping_info_pass(self):
+ """Verify update_mapping_info method creates the correct data structure."""
+ options = {"target": "host1", "volume": "volume1"}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.GRAPH_RESPONSE)):
+ mapping.update_mapping_info()
+ print("%s" % mapping.mapping_info)
+ self.assertEquals(mapping.mapping_info, self.MAPPING_INFO)
+
+ def test_update_mapping_info_fail(self):
+ """Verify update_mapping_info throws the expected exceptions."""
+ response = {"storagePoolBundle": {"host": [{"name": "host1", "hostRef": "1"},
+ {"name": "host2", "hostRef": "2"},
+ {"name": "host3", "hostRef": "3"}],
+ "cluster": [{"name": "host1", "clusterRef": "10"},
+ {"name": "hostgroup2", "clusterRef": "20"},
+ {"name": "hostgroup3", "clusterRef": "30"}]}}
+ options = {"target": "host1", "volume": "volume1"}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ with mock.patch(self.REQ_FUNC, return_value=(200, response)):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Ambiguous target type: target name is used for both host and group targets!"):
+ mapping.update_mapping_info()
+
+ def test_get_lun_mapping_pass(self):
+ """Verify get_lun_mapping method creates the correct data structure."""
+ options = {"target": "host1", "volume": "volume1"}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ mapping.update_mapping_info = lambda: None
+ mapping.mapping_info = self.MAPPING_INFO
+ self.assertEquals(mapping.get_lun_mapping(), (True, "100001", 5))
+
+ options = {"target": "host1", "volume": "volume1", "lun": 5}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ mapping.update_mapping_info = lambda: None
+ mapping.mapping_info = self.MAPPING_INFO
+ self.assertEquals(mapping.get_lun_mapping(), (True, "100001", 5))
+
+ options = {"target": "host1", "volume": "volume3", "lun": 10}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ mapping.update_mapping_info = lambda: None
+ mapping.mapping_info = self.MAPPING_INFO
+ self.assertEquals(mapping.get_lun_mapping(), (False, None, None))
+
+ def test_get_lun_mapping_fail(self):
+ """Verify get_lun_mapping throws the expected exceptions."""
+ options = {"target": "host1", "volume": "volume3", "lun": 5}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ mapping.update_mapping_info = lambda: None
+ mapping.mapping_info = self.MAPPING_INFO
+ with self.assertRaisesRegexp(AnsibleFailJson, "Option lun value is already in use for target!"):
+ mapping.get_lun_mapping()
+
+ options = {"target": "host10", "volume": "volume3"}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ mapping.update_mapping_info = lambda: None
+ mapping.mapping_info = self.MAPPING_INFO
+ with self.assertRaisesRegexp(AnsibleFailJson, "Target does not exist."):
+ mapping.get_lun_mapping()
+
+ options = {"target": "host1", "volume": "volume10"}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ mapping.update_mapping_info = lambda: None
+ mapping.mapping_info = self.MAPPING_INFO
+ with self.assertRaisesRegexp(AnsibleFailJson, "Volume does not exist."):
+ mapping.get_lun_mapping()
+
+ def test_update_pass(self):
+ """Verify update method creates the correct data structure."""
+ options = {"target": "host1", "volume": "volume1"}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ mapping.update_mapping_info = lambda: None
+ mapping.mapping_info = self.MAPPING_INFO
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ with self.assertRaises(AnsibleExitJson):
+ mapping.update()
+
+ options = {"target": "host1", "volume": "volume1", "lun": 5}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ mapping.update_mapping_info = lambda: None
+ mapping.mapping_info = self.MAPPING_INFO
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ with self.assertRaises(AnsibleExitJson):
+ mapping.update()
+
+ options = {"target": "host1", "volume": "volume3", "lun": 10}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ mapping.update_mapping_info = lambda: None
+ mapping.mapping_info = self.MAPPING_INFO
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ with self.assertRaises(AnsibleExitJson):
+ mapping.update()
+
+ options = {"target": "host1", "volume": "volume1", "lun": 10}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ mapping.update_mapping_info = lambda: None
+ mapping.mapping_info = self.MAPPING_INFO
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to update storage array lun mapping."):
+ mapping.update()
+
+ def test_update_fail(self):
+ """Verify update throws the expected exceptions."""
+ options = {"target": "host3", "volume": "volume3"}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ mapping.update_mapping_info = lambda: None
+ mapping.mapping_info = self.MAPPING_INFO
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to update storage array lun mapping."):
+ mapping.update()
+
+ options = {"state": "absent", "target": "host1", "volume": "volume1"}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ mapping.update_mapping_info = lambda: None
+ mapping.mapping_info = self.MAPPING_INFO
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to update storage array lun mapping."):
+ mapping.update()
+
+ options = {"target": "host3", "volume": "volume3", "lun": 15}
+ self._set_args(options)
+ mapping = NetAppESeriesLunMapping()
+ mapping.update_mapping_info = lambda: None
+ mapping.mapping_info = self.MAPPING_INFO
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to update storage array lun mapping."):
+ mapping.update()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_mgmt_interface.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_mgmt_interface.py
new file mode 100644
index 000000000..7c35d40dd
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_mgmt_interface.py
@@ -0,0 +1,513 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_mgmt_interface import NetAppESeriesMgmtInterface
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class MgmtInterfaceTest(ModuleTestCase):
+ REQUIRED_PARAMS = {
+ 'api_username': 'rw',
+ 'api_password': 'password',
+ 'api_url': 'http://localhost',
+ 'ssid': '1',
+ }
+
+ TEST_DATA = [
+ {"controllerRef": "070000000000000000000001",
+ "controllerSlot": 1,
+ "interfaceName": "wan0",
+ "interfaceRef": "2800070000000000000000000001000000000000",
+ "channel": 1,
+ "alias": "creG1g-AP-a",
+ "ipv4Enabled": True,
+ "ipv4Address": "10.1.1.10",
+ "linkStatus": "up",
+ "ipv4SubnetMask": "255.255.255.0",
+ "ipv4AddressConfigMethod": "configStatic",
+ "ipv4GatewayAddress": "10.1.1.1",
+ "ipv6Enabled": False,
+ "physicalLocation": {"slot": 0},
+ "dnsProperties": {"acquisitionProperties": {"dnsAcquisitionType": "stat",
+ "dnsServers": [{"addressType": "ipv4",
+ "ipv4Address": "10.1.0.250"},
+ {"addressType": "ipv4",
+ "ipv4Address": "10.10.0.20"}]},
+ "dhcpAcquiredDnsServers": []},
+ "ntpProperties": {"acquisitionProperties": {"ntpAcquisitionType": "disabled",
+ "ntpServers": None},
+ "dhcpAcquiredNtpServers": []}},
+ {"controllerRef": "070000000000000000000001",
+ "controllerSlot": 1,
+ "interfaceName": "wan1",
+ "interfaceRef": "2800070000000000000000000001000000000000",
+ "channel": 2,
+ "alias": "creG1g-AP-a",
+ "ipv4Enabled": True,
+ "linkStatus": "down",
+ "ipv4Address": "0.0.0.0",
+ "ipv4SubnetMask": "0.0.0.0",
+ "ipv4AddressConfigMethod": "configDhcp",
+ "ipv4GatewayAddress": "10.1.1.1",
+ "ipv6Enabled": False,
+ "physicalLocation": {"slot": 1},
+ "dnsProperties": {"acquisitionProperties": {"dnsAcquisitionType": "stat",
+ "dnsServers": [{"addressType": "ipv4",
+ "ipv4Address": "10.1.0.250",
+ "ipv6Address": None},
+ {"addressType": "ipv4",
+ "ipv4Address": "10.10.0.20",
+ "ipv6Address": None}]},
+ "dhcpAcquiredDnsServers": []},
+ "ntpProperties": {"acquisitionProperties": {"ntpAcquisitionType": "disabled",
+ "ntpServers": None},
+ "dhcpAcquiredNtpServers": []}},
+ {"controllerRef": "070000000000000000000002",
+ "controllerSlot": 2,
+ "interfaceName": "wan0",
+ "interfaceRef": "2800070000000000000000000001000000000000",
+ "channel": 1,
+ "alias": "creG1g-AP-b",
+ "ipv4Enabled": True,
+ "ipv4Address": "0.0.0.0",
+ "linkStatus": "down",
+ "ipv4SubnetMask": "0.0.0.0",
+ "ipv4AddressConfigMethod": "configDhcp",
+ "ipv4GatewayAddress": "10.1.1.1",
+ "ipv6Enabled": False,
+ "physicalLocation": {"slot": 0},
+ "dnsProperties": {"acquisitionProperties": {"dnsAcquisitionType": "stat",
+ "dnsServers": [{"addressType": "ipv4",
+ "ipv4Address": "10.1.0.250",
+ "ipv6Address": None}]},
+ "dhcpAcquiredDnsServers": []},
+ "ntpProperties": {"acquisitionProperties": {"ntpAcquisitionType": "stat",
+ "ntpServers": [{"addrType": "ipvx",
+ "domainName": None,
+ "ipvxAddress": {"addressType": "ipv4",
+ "ipv4Address": "10.13.1.5",
+ "ipv6Address": None}},
+ {"addrType": "ipvx",
+ "domainName": None,
+ "ipvxAddress": {"addressType": "ipv4",
+ "ipv4Address": "10.15.1.8",
+ "ipv6Address": None}}]},
+ "dhcpAcquiredNtpServers": []}},
+ {"controllerRef": "070000000000000000000002",
+ "controllerSlot": 2,
+ "interfaceName": "wan1",
+ "interfaceRef": "2801070000000000000000000001000000000000",
+ "channel": 2,
+ "alias": "creG1g-AP-b",
+ "ipv4Enabled": True,
+ "ipv4Address": "0.0.0.0",
+ "linkStatus": "down",
+ "ipv4SubnetMask": "0.0.0.0",
+ "ipv4AddressConfigMethod": "configDhcp",
+ "ipv4GatewayAddress": "10.1.1.1",
+ "ipv6Enabled": False,
+ "physicalLocation": {"slot": 1},
+ "dnsProperties": {"acquisitionProperties": {"dnsAcquisitionType": "stat",
+ "dnsServers": [{"addressType": "ipv4",
+ "ipv4Address": "10.19.1.2",
+ "ipv6Address": None}]},
+ "dhcpAcquiredDnsServers": []},
+ "ntpProperties": {"acquisitionProperties": {"ntpAcquisitionType": "stat",
+ "ntpServers": [{"addrType": "ipvx",
+ "domainName": None,
+ "ipvxAddress": {"addressType": "ipv4",
+ "ipv4Address": "10.13.1.5",
+ "ipv6Address": None}},
+ {"addrType": "ipvx",
+ "domainName": None,
+ "ipvxAddress": {"addressType": "ipv4",
+ "ipv4Address": "10.15.1.18",
+ "ipv6Address": None}}]},
+ "dhcpAcquiredNtpServers": []}}]
+
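+    # Patch targets: REQ_FUNC intercepts the module's web services request method and TIME_FUNC stubs out its calls to
+    # sleep, so these tests never contact a live array or wait on real delays.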
+ REQ_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_mgmt_interface.NetAppESeriesMgmtInterface.request'
+ TIME_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_mgmt_interface.sleep'
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_get_controllers_pass(self):
+ """Verify dictionary return from get_controllers."""
+ initial = {
+ "state": "enabled",
+ "controller": "A",
+ "port": "1",
+ "address": "192.168.1.1",
+ "subnet_mask": "255.255.255.1",
+ "config_method": "static"}
+ controller_request = [
+ {"physicalLocation": {"slot": 2},
+ "controllerRef": "070000000000000000000002",
+ "networkSettings": {"remoteAccessEnabled": True}},
+ {"physicalLocation": {"slot": 1},
+ "controllerRef": "070000000000000000000001",
+ "networkSettings": {"remoteAccessEnabled": False}}]
+ expected = {
+ 'A': {'controllerRef': '070000000000000000000001',
+ 'controllerSlot': 1, 'ssh': False},
+ 'B': {'controllerRef': '070000000000000000000002',
+ 'controllerSlot': 2, 'ssh': True}}
+
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+
+ with mock.patch(self.REQ_FUNC, return_value=(200, controller_request)):
+ response = mgmt_interface.get_controllers()
+ self.assertTrue(response == expected)
+
+ def test_controller_property_fail(self):
+ """Verify controllers endpoint request failure causes AnsibleFailJson exception."""
+ initial = {
+ "state": "enabled",
+ "controller": "A",
+ "port": "1",
+ "address": "192.168.1.1",
+ "subnet_mask": "255.255.255.1",
+ "config_method": "static"}
+ controller_request = [
+ {"physicalLocation": {"slot": 2},
+ "controllerRef": "070000000000000000000002",
+ "networkSettings": {"remoteAccessEnabled": True}},
+ {"physicalLocation": {"slot": 1},
+ "controllerRef": "070000000000000000000001",
+ "networkSettings": {"remoteAccessEnabled": False}}]
+ expected = {
+ 'A': {'controllerRef': '070000000000000000000001',
+ 'controllerSlot': 1, 'ssh': False},
+ 'B': {'controllerRef': '070000000000000000000002',
+ 'controllerSlot': 2, 'ssh': True}}
+
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve the controller settings."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception):
+ response = mgmt_interface.get_controllers()
+
+ def test_update_target_interface_info_pass(self):
+        """Verify update_target_interface_info populates interface_info as expected."""
+ initial = {
+ "state": "enabled",
+ "controller": "A",
+ "port": "1",
+ "address": "192.168.1.1",
+ "subnet_mask": "255.255.255.0",
+ "config_method": "static"}
+ get_controller = {"A": {"controllerSlot": 1, "controllerRef": "070000000000000000000001", "ssh": False},
+ "B": {"controllerSlot": 2, "controllerRef": "070000000000000000000002", "ssh": True}}
+ expected = {"channel": 1, "link_status": "up", "enabled": True, "address": "10.1.1.10", "gateway": "10.1.1.1", "subnet_mask": "255.255.255.0",
+ "dns_config_method": "stat",
+ "dns_servers": [{"addressType": "ipv4", "ipv4Address": "10.1.0.250"}, {"addressType": "ipv4", "ipv4Address": "10.10.0.20"}],
+ "ntp_config_method": "disabled", "ntp_servers": None, "config_method": "configStatic", "controllerRef": "070000000000000000000001",
+ "controllerSlot": 1, "ipv6_enabled": False, "id": "2800070000000000000000000001000000000000", "ssh": False}
+
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.get_controllers = lambda: get_controller
+
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.TEST_DATA)):
+ mgmt_interface.update_target_interface_info()
+ self.assertEquals(mgmt_interface.interface_info, expected)
+
+ def test_interface_property_request_exception_fail(self):
+ """Verify ethernet-interfaces endpoint request failure results in AnsibleFailJson exception."""
+ initial = {
+ "state": "enabled",
+ "controller": "A",
+ "port": "1",
+ "address": "192.168.1.1",
+ "subnet_mask": "255.255.255.0",
+ "config_method": "static"}
+ get_controller = {"A": {"controllerSlot": 1, "controllerRef": "070000000000000000000001", "ssh": False},
+ "B": {"controllerSlot": 2, "controllerRef": "070000000000000000000002", "ssh": True}}
+
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.get_controllers = lambda: get_controller
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve defined management interfaces."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ mgmt_interface.update_target_interface_info()
+
+ def test_update_target_interface_info_fail(self):
+        """Verify an invalid port number causes an AnsibleFailJson exception."""
+ initial = {
+ "state": "enabled",
+ "controller": "A",
+ "port": "3",
+ "address": "192.168.1.1",
+ "subnet_mask": "255.255.255.1",
+ "config_method": "static"}
+ get_controller = {"A": {"controllerSlot": 1, "controllerRef": "070000000000000000000001", "ssh": False},
+ "B": {"controllerSlot": 2, "controllerRef": "070000000000000000000002", "ssh": True}}
+
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.get_controllers = lambda: get_controller
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Invalid port number! Controller .*? ports:"):
+ with mock.patch(self.REQ_FUNC, return_value=(200, self.TEST_DATA)):
+ mgmt_interface.update_target_interface_info()
+
+ def test_update_body_enable_interface_setting_pass(self):
+ """Validate update_body_enable_interface_setting updates properly."""
+ initial = {"state": "enabled", "controller": "A", "port": "1", "address": "192.168.1.1", "subnet_mask": "255.255.255.1", "config_method": "static"}
+ interface_info = {"channel": 1, "link_status": "up", "enabled": True, "address": "10.1.1.10", "gateway": "10.1.1.1",
+ "subnet_mask": "255.255.255.0",
+ "dns_config_method": "stat",
+ "dns_servers": [{"addressType": "ipv4", "ipv4Address": "10.1.0.250"},
+ {"addressType": "ipv4", "ipv4Address": "10.10.0.20"}],
+ "ntp_config_method": "disabled", "ntp_servers": None, "config_method": "configStatic",
+ "controllerRef": "070000000000000000000001",
+ "controllerSlot": 1, "ipv6_enabled": True, "id": "2800070000000000000000000001000000000000", "ssh": False}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ change_required = mgmt_interface.update_body_enable_interface_setting()
+ self.assertFalse(change_required)
+ self.assertTrue("ipv4Enabled" in mgmt_interface.body and mgmt_interface.body["ipv4Enabled"])
+
+ initial = {"state": "disabled", "controller": "A", "port": "1", "address": "192.168.1.1", "subnet_mask": "255.255.255.1", "config_method": "static"}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ change_required = mgmt_interface.update_body_enable_interface_setting()
+ self.assertTrue(change_required)
+ self.assertTrue("ipv4Enabled" in mgmt_interface.body and not mgmt_interface.body["ipv4Enabled"])
+
+ def test_update_body_enable_interface_setting_fail(self):
+        """Validate update_body_enable_interface_setting throws the expected exception."""
+ initial = {"state": "disabled", "controller": "A", "port": "1", "address": "192.168.1.1", "subnet_mask": "255.255.255.1", "config_method": "static"}
+ interface_info = {"channel": 1, "link_status": "up", "enabled": True, "address": "10.1.1.10", "gateway": "10.1.1.1",
+ "subnet_mask": "255.255.255.0",
+ "dns_config_method": "stat",
+ "dns_servers": [{"addressType": "ipv4", "ipv4Address": "10.1.0.250"},
+ {"addressType": "ipv4", "ipv4Address": "10.10.0.20"}],
+ "ntp_config_method": "disabled", "ntp_servers": None, "config_method": "configStatic",
+ "controllerRef": "070000000000000000000001",
+ "controllerSlot": 1, "ipv6_enabled": False, "id": "2800070000000000000000000001000000000000", "ssh": False}
+
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ with self.assertRaisesRegexp(AnsibleFailJson, "Either IPv4 or IPv6 must be enabled."):
+ mgmt_interface.update_body_enable_interface_setting()
+
+ def test_update_body_interface_settings_fail(self):
+        """Validate update_body_interface_settings builds the expected request body."""
+ initial = {"state": "enabled", "controller": "A", "port": "1", "address": "192.168.1.1", "subnet_mask": "255.255.255.1", "config_method": "static"}
+ interface_info = {"channel": 1, "link_status": "up", "enabled": True, "address": "10.1.1.10", "gateway": "10.1.1.1",
+ "subnet_mask": "255.255.255.0",
+ "dns_config_method": "stat",
+ "dns_servers": [{"addressType": "ipv4", "ipv4Address": "10.1.0.250"},
+ {"addressType": "ipv4", "ipv4Address": "10.10.0.20"}],
+ "ntp_config_method": "disabled", "ntp_servers": None, "config_method": "configStatic",
+ "controllerRef": "070000000000000000000001",
+ "controllerSlot": 1, "ipv6_enabled": False, "id": "2800070000000000000000000001000000000000", "ssh": False}
+
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ self.assertTrue(mgmt_interface.update_body_interface_settings())
+ self.assertEquals(mgmt_interface.body, {"ipv4AddressConfigMethod": "configStatic", "ipv4Address": "192.168.1.1", "ipv4SubnetMask": "255.255.255.1"})
+
+ initial = {"state": "enabled", "controller": "A", "port": "1", "address": "192.168.1.100", "subnet_mask": "255.255.255.1", "gateway": "192.168.1.1",
+ "config_method": "static"}
+ interface_info = {"channel": 1, "link_status": "up", "enabled": True, "address": "10.1.1.10", "gateway": "10.1.1.1",
+ "subnet_mask": "255.255.255.0",
+ "dns_config_method": "stat",
+ "dns_servers": [{"addressType": "ipv4", "ipv4Address": "10.1.0.250"},
+ {"addressType": "ipv4", "ipv4Address": "10.10.0.20"}],
+ "ntp_config_method": "disabled", "ntp_servers": None, "config_method": "configStatic",
+ "controllerRef": "070000000000000000000001",
+ "controllerSlot": 1, "ipv6_enabled": False, "id": "2800070000000000000000000001000000000000", "ssh": False}
+
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ self.assertTrue(mgmt_interface.update_body_interface_settings())
+ self.assertEquals(mgmt_interface.body, {"ipv4AddressConfigMethod": "configStatic", "ipv4Address": "192.168.1.100", "ipv4SubnetMask": "255.255.255.1",
+ "ipv4GatewayAddress": "192.168.1.1"})
+
+ initial = {"state": "enabled", "controller": "A", "port": "1", "config_method": "dhcp"}
+ interface_info = {"channel": 1, "link_status": "up", "enabled": True, "address": "10.1.1.10", "gateway": "10.1.1.1",
+ "subnet_mask": "255.255.255.0",
+ "dns_config_method": "stat",
+ "dns_servers": [{"addressType": "ipv4", "ipv4Address": "10.1.0.250"},
+ {"addressType": "ipv4", "ipv4Address": "10.10.0.20"}],
+ "ntp_config_method": "disabled", "ntp_servers": None, "config_method": "configStatic",
+ "controllerRef": "070000000000000000000001",
+ "controllerSlot": 1, "ipv6_enabled": False, "id": "2800070000000000000000000001000000000000", "ssh": False}
+
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ self.assertTrue(mgmt_interface.update_body_interface_settings())
+ self.assertEquals(mgmt_interface.body, {"ipv4AddressConfigMethod": "configDhcp"})
+
+ initial = {"state": "enabled", "controller": "A", "port": "1", "config_method": "dhcp"}
+ interface_info = {"channel": 1, "link_status": "up", "enabled": True, "address": "10.1.1.10", "gateway": "10.1.1.1",
+ "subnet_mask": "255.255.255.0",
+ "dns_config_method": "stat",
+ "dns_servers": [{"addressType": "ipv4", "ipv4Address": "10.1.0.250"},
+ {"addressType": "ipv4", "ipv4Address": "10.10.0.20"}],
+ "ntp_config_method": "disabled", "ntp_servers": None, "config_method": "configDhcp",
+ "controllerRef": "070000000000000000000001",
+ "controllerSlot": 1, "ipv6_enabled": False, "id": "2800070000000000000000000001000000000000", "ssh": False}
+
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ self.assertFalse(mgmt_interface.update_body_interface_settings())
+ self.assertEquals(mgmt_interface.body, {"ipv4AddressConfigMethod": "configDhcp"})
+
+ def test_update_body_dns_server_settings_pass(self):
+        """Validate update_body_dns_server_settings builds the expected request body."""
+ interface_info = {"channel": 1, "link_status": "up", "enabled": True, "address": "10.1.1.10", "gateway": "10.1.1.1",
+ "subnet_mask": "255.255.255.0",
+ "dns_config_method": "stat",
+ "dns_servers": [{"addressType": "ipv4", "ipv4Address": "10.1.0.250"},
+ {"addressType": "ipv4", "ipv4Address": "10.10.0.20"}],
+ "ntp_config_method": "disabled", "ntp_servers": None, "config_method": "configStatic",
+ "controllerRef": "070000000000000000000001",
+ "controllerSlot": 1, "ipv6_enabled": False, "id": "2800070000000000000000000001000000000000", "ssh": False}
+
+ initial = {"state": "enabled", "controller": "A", "port": "1", "dns_config_method": "dhcp"}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ self.assertTrue(mgmt_interface.update_body_dns_server_settings())
+ self.assertEquals(mgmt_interface.body, {"dnsAcquisitionDescriptor": {"dnsAcquisitionType": "dhcp"}})
+
+ initial = {"state": "enabled", "controller": "A", "port": "1", "dns_config_method": "static", "dns_address": "192.168.1.100"}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ self.assertTrue(mgmt_interface.update_body_dns_server_settings())
+ self.assertEquals(mgmt_interface.body, {"dnsAcquisitionDescriptor": {"dnsAcquisitionType": "stat",
+ "dnsServers": [{"addressType": "ipv4", "ipv4Address": "192.168.1.100"}]}})
+
+ initial = {"state": "enabled", "controller": "A", "port": "1", "dns_config_method": "static", "dns_address": "192.168.1.100",
+ "dns_address_backup": "192.168.1.102"}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ self.assertTrue(mgmt_interface.update_body_dns_server_settings())
+ self.assertEquals(mgmt_interface.body, {"dnsAcquisitionDescriptor": {"dnsAcquisitionType": "stat",
+ "dnsServers": [{"addressType": "ipv4", "ipv4Address": "192.168.1.100"},
+ {"addressType": "ipv4", "ipv4Address": "192.168.1.102"}]}})
+
+ def test_update_body_ntp_server_settings_pass(self):
+        """Validate update_body_ntp_server_settings builds the expected request body."""
+ interface_info = {"channel": 1, "link_status": "up", "enabled": True, "address": "10.1.1.10", "gateway": "10.1.1.1",
+ "subnet_mask": "255.255.255.0",
+ "dns_config_method": "stat",
+ "dns_servers": [{"addressType": "ipv4", "ipv4Address": "10.1.0.250"},
+ {"addressType": "ipv4", "ipv4Address": "10.10.0.20"}],
+ "ntp_config_method": "dhcp", "ntp_servers": None, "config_method": "configStatic",
+ "controllerRef": "070000000000000000000001",
+ "controllerSlot": 1, "ipv6_enabled": False, "id": "2800070000000000000000000001000000000000", "ssh": False}
+
+ initial = {"state": "enabled", "controller": "A", "port": "1", "ntp_config_method": "disabled"}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ self.assertTrue(mgmt_interface.update_body_ntp_server_settings())
+ self.assertEquals(mgmt_interface.body, {"ntpAcquisitionDescriptor": {"ntpAcquisitionType": "disabled"}})
+
+ initial = {"state": "enabled", "controller": "A", "port": "1", "ntp_config_method": "dhcp"}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ self.assertFalse(mgmt_interface.update_body_ntp_server_settings())
+ self.assertEquals(mgmt_interface.body, {"ntpAcquisitionDescriptor": {"ntpAcquisitionType": "dhcp"}})
+
+ initial = {"state": "enabled", "controller": "A", "port": "1", "ntp_config_method": "static", "ntp_address": "192.168.1.200"}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ self.assertTrue(mgmt_interface.update_body_ntp_server_settings())
+ self.assertEquals(mgmt_interface.body, {"ntpAcquisitionDescriptor": {
+ "ntpAcquisitionType": "stat", "ntpServers": [{"addrType": "ipvx", "ipvxAddress": {"addressType": "ipv4", "ipv4Address": "192.168.1.200"}}]}})
+
+ initial = {"state": "enabled", "controller": "A", "port": "1", "ntp_config_method": "static", "ntp_address": "192.168.1.200",
+ "ntp_address_backup": "192.168.1.202"}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ self.assertTrue(mgmt_interface.update_body_ntp_server_settings())
+ self.assertEquals(mgmt_interface.body, {"ntpAcquisitionDescriptor": {
+ "ntpAcquisitionType": "stat", "ntpServers": [{"addrType": "ipvx", "ipvxAddress": {"addressType": "ipv4", "ipv4Address": "192.168.1.200"}},
+ {"addrType": "ipvx", "ipvxAddress": {"addressType": "ipv4", "ipv4Address": "192.168.1.202"}}]}})
+
+ def test_update_body_ssh_setting_pass(self):
+        """Validate update_body_ssh_setting builds the expected request body."""
+ interface_info = {"channel": 1, "link_status": "up", "enabled": True, "address": "10.1.1.10", "gateway": "10.1.1.1",
+ "subnet_mask": "255.255.255.0",
+ "dns_config_method": "stat",
+ "dns_servers": [{"addressType": "ipv4", "ipv4Address": "10.1.0.250"},
+ {"addressType": "ipv4", "ipv4Address": "10.10.0.20"}],
+ "ntp_config_method": "disabled", "ntp_servers": None, "config_method": "configStatic",
+ "controllerRef": "070000000000000000000001",
+ "controllerSlot": 1, "ipv6_enabled": False, "id": "2800070000000000000000000001000000000000", "ssh": False}
+
+ initial = {"state": "enabled", "controller": "A", "port": "1", "config_method": "dhcp", "ssh": True}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ self.assertTrue(mgmt_interface.update_body_ssh_setting())
+ self.assertEquals(mgmt_interface.body, {"enableRemoteAccess": True})
+
+ initial = {"state": "enabled", "controller": "A", "port": "1", "config_method": "dhcp", "ssh": False}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.interface_info = interface_info
+ self.assertFalse(mgmt_interface.update_body_ssh_setting())
+ self.assertEquals(mgmt_interface.body, {"enableRemoteAccess": False})
+
+ def test_update_url_pass(self):
+        """Verify update_url sets the expected url."""
+ initial = {"state": "enabled", "controller": "A", "port": "1", "config_method": "dhcp", "ssh": False}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.url = "https://192.168.1.100:8443/devmgr/v2/"
+ mgmt_interface.alt_interface_addresses = ["192.168.1.102"]
+ mgmt_interface.update_url()
+        self.assertEqual(mgmt_interface.url, "https://192.168.1.102:8443/devmgr/v2/")
+
+ def test_update_pass(self):
+ """Verify update successfully completes."""
+ initial = {"state": "enabled", "controller": "A", "port": "1", "config_method": "dhcp", "ssh": False}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.update_request_body = lambda: False
+ mgmt_interface.is_embedded = lambda: False
+ mgmt_interface.use_alternate_address = False
+ with self.assertRaisesRegexp(AnsibleExitJson, "No changes are required."):
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ mgmt_interface.update()
+
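+        # Stub that alternates its return value: the first call reports a change is required and the second reports
+        # none, so update() applies the settings once and then exits.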
+ def update_request_body():
+ update_request_body.value = not update_request_body.value
+ return update_request_body.value
+ update_request_body.value = False
+
+ initial = {"state": "enabled", "controller": "A", "port": "1", "config_method": "dhcp", "ssh": False}
+ self._set_args(initial)
+ mgmt_interface = NetAppESeriesMgmtInterface()
+ mgmt_interface.update_request_body = update_request_body
+ mgmt_interface.is_embedded = lambda: True
+ mgmt_interface.use_alternate_address = False
+ with self.assertRaisesRegexp(AnsibleExitJson, "The interface settings have been updated."):
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ mgmt_interface.update()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_nvme_interface.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_nvme_interface.py
new file mode 100644
index 000000000..aee149f0a
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_nvme_interface.py
@@ -0,0 +1,220 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_nvme_interface import NetAppESeriesNvmeInterface
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class NvmeInterfaceTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"api_username": "rw",
+ "api_password": "password",
+ "api_url": "http://localhost",
+ "ssid": "1",
+ "state": "enabled",
+ "controller": "A",
+ "channel": 1}
+
+ REQ_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_nvme_interface.NetAppESeriesNvmeInterface.request"
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_valid_options_pass(self):
+ """Verify valid options."""
+ valid_option_list = [{"state": "enabled", "config_method": "static", "address": "192.168.1.100", "subnet_mask": "255.255.255.0",
+ "gateway": "192.168.1.1", "mtu": 1500},
+ {"address": "192.168.1.100"},
+ {"state": "enabled", "config_method": "dhcp", "mtu": 1500},
+ {"state": "disabled"}]
+
+ for option in valid_option_list:
+ self._set_args(option)
+ nvme = NetAppESeriesNvmeInterface()
+
+ def test_invalid_options_fail(self):
+ """Verify invalid options throw expected exceptions."""
+ invalid_option_list = [{"state": "enabled", "config_method": "static", "address": "1920.168.1.100", "subnet_mask": "255.255.255.0",
+ "gateway": "192.168.1.1", "mtu": 1500},
+ {"state": "enabled", "config_method": "static", "address": "192.168.1.100", "subnet_mask": "255.2550.255.0",
+ "gateway": "192.168.1.1", "mtu": 1500},
+ {"state": "enabled", "config_method": "static", "address": "192.168.1.100", "subnet_mask": "255.255.255.0",
+ "gateway": "192.168..100", "mtu": 1500},
+ {"state": "enabled", "config_method": "static", "address": "192.168.1.100", "subnet_mask": "2550.255.255.0",
+ "gateway": "192.168.1.1000", "mtu": 1500}]
+
+ for option in invalid_option_list:
+ self._set_args(option)
+ with self.assertRaises(AnsibleFailJson):
+ nvme = NetAppESeriesNvmeInterface()
+
+ def test_get_nvmeof_interfaces_pass(self):
+ """Verify get_nvmeof_interfaces method returns the expected list of interface values."""
+ options = {"address": "192.168.1.100"}
+ response = [{"controllerRef": "070000000000000000000001", "interfaceRef": "2201020000000000000000000000000000000000",
+ "ioInterfaceTypeData": {"interfaceType": "ib",
+ "ib": {"interfaceRef": "2201020000000000000000000000000000000000", "channel": 1, "linkState": "up"}},
+ "commandProtocolPropertiesList": {"commandProtocolProperties": [
+ {"commandProtocol": "nvme", "nvmeProperties": {"commandSet": "nvmeof", "nvmeofProperties": {
+ "provider": "providerInfiniband", "ibProperties": {"ipAddressData": {
+ "addressType": "ipv4", "ipv4Data": {"configState": "configured", "ipv4Address": "192.168.1.100"}}}}}}]}}]
+ self._set_args(options)
+ nvme = NetAppESeriesNvmeInterface()
+ with mock.patch(self.REQ_FUNC, return_value=(200, response)):
+ self.assertEquals(nvme.get_nvmeof_interfaces(), [
+ {"properties": {"provider": "providerInfiniband", "ibProperties": {
+ "ipAddressData": {"addressType": "ipv4",
+ "ipv4Data": {"configState": "configured", "ipv4Address": "192.168.1.100"}}}},
+ "reference": "2201020000000000000000000000000000000000", "channel": 1, "interface_type": "ib",
+ "interface": {"interfaceRef": "2201020000000000000000000000000000000000", "channel": 1,
+ "linkState": "up"}, "controller_id": "070000000000000000000001",
+ "link_status": "up"}])
+
+ def test_get_nvmeof_interfaces_fail(self):
+ """Verify get_nvmeof_interfaces method throws the expected exceptions."""
+ options = {"address": "192.168.1.100"}
+ self._set_args(options)
+ nvme = NetAppESeriesNvmeInterface()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve defined host interfaces."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ nvme.get_nvmeof_interfaces()
+
+ def test_get_target_interface_pass(self):
+ """Verify get_target_interface returns the expected interface."""
+ # options = {"state": "enabled", "config_method": "static", "address": "192.168.1.100", "subnet_mask": "255.255.255.0",
+ # "gateway": "192.168.1.1", "mtu": 1500}
+ options = {"address": "192.168.1.200"}
+ self._set_args(options)
+ nvme = NetAppESeriesNvmeInterface()
+ nvme.get_nvmeof_interfaces = lambda: [
+ {"properties": {"provider": "providerInfiniband", "ibProperties": {
+ "ipAddressData": {"addressType": "ipv4",
+ "ipv4Data": {"configState": "configured", "ipv4Address": "192.168.1.100"}}}},
+ "reference": "2201020000000000000000000000000000000000", "channel": 5,
+ "interface_type": {"interfaceRef": "2201020000000000000000000000000000000000", "channel": 5,
+ "linkState": "up"}, "controller_id": "070000000000000000000001",
+ "link_status": "up"},
+ {"properties": {"provider": "providerInfiniband", "ibProperties": {
+ "ipAddressData": {"addressType": "ipv4",
+ "ipv4Data": {"configState": "configured", "ipv4Address": "192.168.2.100"}}}},
+ "reference": "2201030000000000000000000000000000000000", "channel": 4,
+ "interface_type": {"interfaceRef": "2201030000000000000000000000000000000000", "channel": 4,
+ "linkState": "up"}, "controller_id": "070000000000000000000001",
+ "link_status": "up"},
+ {"properties": {"provider": "providerInfiniband", "ibProperties": {
+ "ipAddressData": {"addressType": "ipv4",
+ "ipv4Data": {"configState": "configured", "ipv4Address": "192.168.3.100"}}}},
+ "reference": "2201040000000000000000000000000000000000", "channel": 6,
+ "interface_type": {"interfaceRef": "2201040000000000000000000000000000000000", "channel": 6,
+ "linkState": "down"}, "controller_id": "070000000000000000000001",
+ "link_status": "up"}]
+ nvme.get_controllers = lambda: {"A": "070000000000000000000001", "B": "070000000000000000000002"}
+ self.assertEqual(nvme.get_target_interface(), {
+ "properties": {"provider": "providerInfiniband", "ibProperties": {
+ "ipAddressData": {"addressType": "ipv4",
+ "ipv4Data": {"configState": "configured", "ipv4Address": "192.168.2.100"}}}},
+ "reference": "2201030000000000000000000000000000000000", "channel": 4,
+ "interface_type": {"interfaceRef": "2201030000000000000000000000000000000000", "channel": 4,
+ "linkState": "up"}, "controller_id": "070000000000000000000001",
+ "link_status": "up"})
+
+ def test_get_target_interface_fail(self):
+ """Verify get_target_interface method throws the expected exceptions."""
+ options = {"address": "192.168.1.200", "channel": "0"}
+ self._set_args(options)
+ nvme = NetAppESeriesNvmeInterface()
+ nvme.get_nvmeof_interfaces = lambda: [
+ {"properties": {"provider": "providerInfiniband", "ibProperties": {
+ "ipAddressData": {"addressType": "ipv4",
+ "ipv4Data": {"configState": "configured", "ipv4Address": "192.168.1.100"}}}},
+ "reference": "2201020000000000000000000000000000000000", "channel": 5,
+ "interface_type": {"interfaceRef": "2201020000000000000000000000000000000000", "channel": 5,
+ "linkState": "up"}, "controller_id": "070000000000000000000001",
+ "link_status": "up"}]
+ nvme.get_controllers = lambda: {"A": "070000000000000000000001", "B": "070000000000000000000002"}
+ with self.assertRaisesRegexp(AnsibleFailJson, "Invalid controller .*? NVMe channel."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ nvme.get_target_interface()
+
+ options = {"address": "192.168.1.200", "channel": "2"}
+ self._set_args(options)
+ nvme = NetAppESeriesNvmeInterface()
+ nvme.get_nvmeof_interfaces = lambda: [
+ {"properties": {"provider": "providerInfiniband", "ibProperties": {
+ "ipAddressData": {"addressType": "ipv4",
+ "ipv4Data": {"configState": "configured", "ipv4Address": "192.168.1.100"}}}},
+ "reference": "2201020000000000000000000000000000000000", "channel": 5,
+ "interface_type": {"interfaceRef": "2201020000000000000000000000000000000000", "channel": 5,
+ "linkState": "up"}, "controller_id": "070000000000000000000001",
+ "link_status": "up"}]
+ nvme.get_controllers = lambda: {"A": "070000000000000000000001", "B": "070000000000000000000002"}
+ with self.assertRaisesRegexp(AnsibleFailJson, "Invalid controller .*? NVMe channel."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ nvme.get_target_interface()
+
+ def test_update_pass(self):
+        """Verify update successfully completes."""
+ # options = {"state": "enabled", "config_method": "static", "address": "192.168.1.100", "subnet_mask": "255.255.255.0",
+ # "gateway": "192.168.1.1", "mtu": 1500}
+ options = {"address": "192.168.1.200"}
+ iface = {"properties": {"provider": "providerInfiniband",
+ "ibProperties": {"ipAddressData": {"addressType": "ipv4",
+ "ipv4Data": {"configState": "configured", "ipv4Address": "192.168.1.100"}}}},
+ "reference": "2201020000000000000000000000000000000000", "channel": 5, "interface_type": "ib", "controllerRef": "070000000000000000000001",
+ "link_status": "up"}
+ self._set_args(options)
+ nvme = NetAppESeriesNvmeInterface()
+ nvme.get_target_interface = lambda: iface
+ with self.assertRaisesRegexp(AnsibleExitJson, "NVMeoF interface settings have been updated."):
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ nvme.update()
+
+ options = {"address": "192.168.1.200"}
+ iface = {"properties": {"provider": "providerInfiniband",
+ "ibProperties": {"ipAddressData": {"addressType": "ipv4",
+ "ipv4Data": {"configState": "configured", "ipv4Address": "192.168.1.100"}}}},
+ "reference": "2201020000000000000000000000000000000000", "channel": 5, "interface_type": "ib", "controllerRef": "070000000000000000000001",
+ "link_status": "up"}
+ self._set_args(options)
+ nvme = NetAppESeriesNvmeInterface()
+ nvme.module.check_mode = True
+ nvme.get_target_interface = lambda: iface
+ with self.assertRaisesRegexp(AnsibleExitJson, "No changes have been made."):
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ nvme.update()
+
+ options = {"address": "192.168.1.100"}
+ iface = {"properties": {"provider": "providerInfiniband",
+ "ibProperties": {"ipAddressData": {"addressType": "ipv4",
+ "ipv4Data": {"configState": "configured", "ipv4Address": "192.168.1.100"}}}},
+ "reference": "2201020000000000000000000000000000000000", "channel": 5, "interface_type": "ib", "controllerRef": "070000000000000000000001",
+ "link_status": "up"}
+ self._set_args(options)
+ nvme = NetAppESeriesNvmeInterface()
+ nvme.get_target_interface = lambda: iface
+
+ with self.assertRaisesRegexp(AnsibleExitJson, "No changes have been made."):
+ with mock.patch(self.REQ_FUNC, return_value=(200, None)):
+ nvme.update()
+
+ def test_update_fail(self):
+ """Verify update throws expected exception."""
+ # options = {"state": "enabled", "config_method": "static", "address": "192.168.1.100", "subnet_mask": "255.255.255.0",
+ # "gateway": "192.168.1.1", "mtu": 1500}
+ options = {"address": "192.168.1.200"}
+ iface = {"properties": {"provider": "providerInfiniband",
+ "ibProperties": {"ipAddressData": {"addressType": "ipv4",
+ "ipv4Data": {"configState": "configured", "ipv4Address": "192.168.1.100"}}}},
+ "reference": "2201020000000000000000000000000000000000", "channel": 5, "interface_type": "ib", "controllerRef": "070000000000000000000001",
+ "link_status": "up"}
+ self._set_args(options)
+ nvme = NetAppESeriesNvmeInterface()
+ nvme.get_target_interface = lambda: iface
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to configure interface."):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ nvme.update()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_proxy_drive_firmware_upload.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_proxy_drive_firmware_upload.py
new file mode 100644
index 000000000..a527b2917
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_proxy_drive_firmware_upload.py
@@ -0,0 +1,137 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_proxy_drive_firmware_upload import NetAppESeriesProxyDriveFirmwareUpload
+from units.compat.mock import patch, mock_open
+
+
+class StoragePoolTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"api_username": "username",
+ "api_password": "password",
+ "api_url": "http://localhost/devmgr/v2",
+ "validate_certs": "no"}
+
+ REQUEST_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_proxy_drive_firmware_upload." \
+ "NetAppESeriesProxyDriveFirmwareUpload.request"
+ CREATE_MULTIPART_FORMDATA_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules." \
+ "na_santricity_proxy_drive_firmware_upload.create_multipart_formdata"
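+    # Patching these os helpers lets the tests fabricate firmware files and directories without touching the real filesystem.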
+ OS_PATH_EXISTS_FUNC = "os.path.exists"
+ OS_PATH_ISDIR_FUNC = "os.path.isdir"
+ OS_LISTDIR_FUNC = "os.listdir"
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_determine_file_paths_pass(self):
+ """Ensure determine_file_paths method succeeds when all files exist."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyDriveFirmwareUpload()
+
+ with patch(self.OS_PATH_EXISTS_FUNC, return_value=True):
+ with patch(self.OS_PATH_ISDIR_FUNC, side_effect=[False, True]):
+ with patch(self.OS_LISTDIR_FUNC, return_value=["firmware2.dlp", "firmware3.dlp"]):
+ firmware.determine_file_paths()
+ self.assertEqual(firmware.files, {"firmware1.dlp": "/path/to/firmware1.dlp",
+ "firmware2.dlp": "/path/to/firmware/directory/firmware2.dlp",
+ "firmware3.dlp": "/path/to/firmware/directory/firmware3.dlp"})
+
+ def test_determine_file_paths_fail(self):
+ """Ensure determine_file_paths method throws expected exception."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyDriveFirmwareUpload()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Drive firmware file does not exist!"):
+ with patch(self.OS_PATH_EXISTS_FUNC, side_effect=[True, False]):
+ firmware.determine_file_paths()
+
+ def test_determine_changes_pass(self):
+        """Verify determine_changes returns the expected results."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyDriveFirmwareUpload()
+ firmware.files = {"firmware1.dlp": "/path/to/firmware1.dlp",
+ "firmware2.dlp": "/path/to/firmware/directory/firmware2.dlp",
+ "firmware3.dlp": "/path/to/firmware/directory/firmware3.dlp"}
+
+ with patch(self.REQUEST_FUNC, return_value=(200, [{"fileName": "firmware1.dlp"}, {"fileName": "firmware3.dlp"}, {"fileName": "firmware4.dlp"}])):
+ firmware.determine_changes()
+
+ self.assertEqual(firmware.add_files, ["firmware2.dlp"])
+ self.assertEqual(firmware.remove_files, ["firmware4.dlp"])
+
+ def test_determine_changes_fail(self):
+        """Ensure determine_changes throws an exception when the firmware file listing request fails."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyDriveFirmwareUpload()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve proxy drive firmware file list."):
+ with patch(self.REQUEST_FUNC, return_value=Exception()):
+ firmware.determine_changes()
+
+ def test_upload_files_pass(self):
+        """Ensure upload_files completes successfully."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyDriveFirmwareUpload()
+ firmware.files = {"firmware1.dlp": "/path/to/firmware1.dlp",
+ "firmware2.dlp": "/path/to/firmware/directory/firmware2.dlp",
+ "firmware3.dlp": "/path/to/firmware/directory/firmware3.dlp"}
+ firmware.add_files = ["firmware1.dlp", "firmware2.dlp"]
+
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=(None, None)):
+ with patch(self.REQUEST_FUNC, return_value=(200, None)):
+ firmware.upload_files()
+
+ def test_delete_files_pass(self):
+ """Ensure delete_files completes as expected."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyDriveFirmwareUpload()
+ firmware.remove_files = ["firmware1.dlp", "firmware2.dlp"]
+
+ with patch(self.REQUEST_FUNC, return_value=(204, None)):
+ firmware.delete_files()
+
+ def test_apply_pass(self):
+ """Ensure that the apply method behaves as expected."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyDriveFirmwareUpload()
+ firmware.files = {"firmware1.dlp": "/path/to/firmware1.dlp",
+ "firmware2.dlp": "/path/to/firmware/directory/firmware2.dlp",
+ "firmware3.dlp": "/path/to/firmware/directory/firmware3.dlp"}
+ firmware.module.check_mode = True
+ firmware.is_proxy = lambda: True
+ firmware.determine_file_paths = lambda: None
+ firmware.determine_changes = lambda: None
+
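+        # apply() exits by raising AnsibleExitJson; the regex assertions below inspect the serialized result for the
+        # expected 'changed' flag under each add/remove combination.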
+ firmware.add_files = ["firmware1.dlp", "firmware2.dlp"]
+ firmware.remove_files = ["firmware3.dlp", "firmware4.dlp"]
+ with self.assertRaisesRegexp(AnsibleExitJson, r"'changed': True"):
+ firmware.apply()
+
+ firmware.add_files = ["firmware1.dlp", "firmware2.dlp"]
+ firmware.remove_files = []
+ with self.assertRaisesRegexp(AnsibleExitJson, r"'changed': True"):
+ firmware.apply()
+
+ firmware.add_files = []
+ firmware.remove_files = ["firmware3.dlp", "firmware4.dlp"]
+ with self.assertRaisesRegexp(AnsibleExitJson, r"'changed': True"):
+ firmware.apply()
+
+ firmware.add_files = []
+ firmware.remove_files = []
+ with self.assertRaisesRegexp(AnsibleExitJson, r"'changed': False"):
+ firmware.apply()
+
+ def test_apply_fail(self):
+ """Ensure that the apply method fails when not executing against the proxy."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyDriveFirmwareUpload()
+ firmware.is_proxy = lambda: False
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Module can only be executed against SANtricity Web Services Proxy."):
+ firmware.apply()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_proxy_firmware_upload.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_proxy_firmware_upload.py
new file mode 100644
index 000000000..72ccd6711
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_proxy_firmware_upload.py
@@ -0,0 +1,136 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_proxy_firmware_upload import NetAppESeriesProxyFirmwareUpload
+from units.compat.mock import patch, mock_open
+
+
+class StoragePoolTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"api_username": "username",
+ "api_password": "password",
+ "api_url": "http://localhost/devmgr/v2",
+ "validate_certs": "no"}
+
+ REQUEST_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_proxy_firmware_upload.NetAppESeriesProxyFirmwareUpload.request"
+ CREATE_MULTIPART_FORMDATA_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules." \
+ "na_santricity_proxy_firmware_upload.create_multipart_formdata"
+ OS_PATH_EXISTS_FUNC = "os.path.exists"
+ OS_PATH_ISDIR_FUNC = "os.path.isdir"
+ OS_LISTDIR_FUNC = "os.listdir"
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_determine_file_paths_pass(self):
+ """Ensure determine_file_paths method succeeds when all files exist."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyFirmwareUpload()
+
+ with patch(self.OS_PATH_EXISTS_FUNC, return_value=True):
+ with patch(self.OS_PATH_ISDIR_FUNC, side_effect=[False, True]):
+ with patch(self.OS_LISTDIR_FUNC, return_value=["firmware2.dlp", "firmware3.dlp"]):
+ firmware.determine_file_paths()
+ self.assertEqual(firmware.files, {"firmware1.dlp": "/path/to/firmware1.dlp",
+ "firmware2.dlp": "/path/to/firmware/directory/firmware2.dlp",
+ "firmware3.dlp": "/path/to/firmware/directory/firmware3.dlp"})
+
+ def test_determine_file_paths_fail(self):
+ """Ensure determine_file_paths method throws expected exception."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyFirmwareUpload()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Drive firmware file does not exist!"):
+ with patch(self.OS_PATH_EXISTS_FUNC, side_effect=[True, False]):
+ firmware.determine_file_paths()
+
+ def test_determine_changes_pass(self):
+        """Verify determine_changes returns the expected results."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyFirmwareUpload()
+ firmware.files = {"firmware1.dlp": "/path/to/firmware1.dlp",
+ "firmware2.dlp": "/path/to/firmware/directory/firmware2.dlp",
+ "firmware3.dlp": "/path/to/firmware/directory/firmware3.dlp"}
+
+ with patch(self.REQUEST_FUNC, return_value=(200, [{"filename": "firmware1.dlp"}, {"filename": "firmware3.dlp"}, {"filename": "firmware4.dlp"}])):
+ firmware.determine_changes()
+
+ self.assertEqual(firmware.add_files, ["firmware2.dlp"])
+ self.assertEqual(firmware.remove_files, ["firmware4.dlp"])
+
+ def test_determine_changes_fail(self):
+        """Ensure determine_changes throws an exception when the firmware file listing request fails."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyFirmwareUpload()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve current firmware file listing."):
+ with patch(self.REQUEST_FUNC, return_value=Exception()):
+ firmware.determine_changes()
+
+ def test_upload_files_pass(self):
+        """Ensure upload_files completes successfully."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyFirmwareUpload()
+ firmware.files = {"firmware1.dlp": "/path/to/firmware1.dlp",
+ "firmware2.dlp": "/path/to/firmware/directory/firmware2.dlp",
+ "firmware3.dlp": "/path/to/firmware/directory/firmware3.dlp"}
+ firmware.add_files = ["firmware1.dlp", "firmware2.dlp"]
+
+ with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=(None, None)):
+ with patch(self.REQUEST_FUNC, return_value=(200, None)):
+ firmware.upload_files()
+
+ def test_delete_files_pass(self):
+ """Ensure delete_files completes as expected."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyFirmwareUpload()
+ firmware.remove_files = ["firmware1.dlp", "firmware2.dlp"]
+
+ with patch(self.REQUEST_FUNC, return_value=(204, None)):
+ firmware.delete_files()
+
+ def test_apply_pass(self):
+ """Ensure that the apply method behaves as expected."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyFirmwareUpload()
+ firmware.files = {"firmware1.dlp": "/path/to/firmware1.dlp",
+ "firmware2.dlp": "/path/to/firmware/directory/firmware2.dlp",
+ "firmware3.dlp": "/path/to/firmware/directory/firmware3.dlp"}
+ firmware.module.check_mode = True
+ firmware.is_proxy = lambda: True
+ firmware.determine_file_paths = lambda: None
+ firmware.determine_changes = lambda: None
+
+ firmware.add_files = ["firmware1.dlp", "firmware2.dlp"]
+ firmware.remove_files = ["firmware3.dlp", "firmware4.dlp"]
+ with self.assertRaisesRegexp(AnsibleExitJson, r"'changed': True"):
+ firmware.apply()
+
+ firmware.add_files = ["firmware1.dlp", "firmware2.dlp"]
+ firmware.remove_files = []
+ with self.assertRaisesRegexp(AnsibleExitJson, r"'changed': True"):
+ firmware.apply()
+
+ firmware.add_files = []
+ firmware.remove_files = ["firmware3.dlp", "firmware4.dlp"]
+ with self.assertRaisesRegexp(AnsibleExitJson, r"'changed': True"):
+ firmware.apply()
+
+ firmware.add_files = []
+ firmware.remove_files = []
+ with self.assertRaisesRegexp(AnsibleExitJson, r"'changed': False"):
+ firmware.apply()
+
+ def test_apply_fail(self):
+ """Ensure that the apply method fails when not executing against the proxy."""
+ self._set_args({"firmware": ["/path/to/firmware1.dlp", "/path/to/firmware/directory"]})
+ firmware = NetAppESeriesProxyFirmwareUpload()
+ firmware.is_proxy = lambda: False
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"Module can only be executed against SANtricity Web Services Proxy."):
+ firmware.apply()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_proxy_systems.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_proxy_systems.py
new file mode 100644
index 000000000..31e078203
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_proxy_systems.py
@@ -0,0 +1,497 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible.module_utils import six
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_proxy_systems import NetAppESeriesProxySystems
+from units.compat import mock
+
+
+class StoragePoolTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"api_username": "username",
+ "api_password": "password",
+ "api_url": "http://localhost/devmgr/v2",
+ "validate_certs": "no"}
+
+ REQUEST_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_proxy_systems.NetAppESeriesProxySystems.request"
+ _REQUEST_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_proxy_systems.NetAppESeriesProxySystems._request"
+ TIME_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_proxy_systems.sleep"
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_valid_options_pass(self):
+ """Verify valid options."""
+ options_list = [{"password": "password", "systems": [{"ssid": "10", "serial": "021633035190"},
+ {"addresses": ["192.168.1.100"]},
+ {"serial": "021628016299"}]},
+ {"password": "password", "systems": ["021178889999", "022348016297", "021625436296"]},
+ {"password": "password", "systems": []}, {}]
+
+ for options in options_list:
+ self._set_args(options)
+ systems = NetAppESeriesProxySystems()
+
+ self._set_args(options_list[0])
+ systems = NetAppESeriesProxySystems()
+ self.assertEquals(systems.systems, [
+ {"ssid": "10", "serial": "021633035190", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": [], "embedded_available": None, "accept_certificate": False, "current_info": {}, "changes": {},
+ "updated_required": False, "failed": False, "discovered": False},
+ {"ssid": "192.168.1.100", "serial": "", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.100"], "embedded_available": None, "accept_certificate": False, "current_info": {},
+ "changes": {}, "updated_required": False, "failed": False, "discovered": False},
+ {"ssid": "021628016299", "serial": "021628016299", "password": "password", "password_valid": None, "password_set": None,
+ "stored_password_valid": None, "meta_tags": [], "controller_addresses": [], "embedded_available": None, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": False}])
+
+ def test_invalid_options_fail(self):
+ """Verify invalid systems option throws expected exception."""
+ self._set_args({"password": "password", "systems": [[]]})
+ with self.assertRaisesRegexp(AnsibleFailJson, "Invalid system! All systems must either be a simple serial number or a dictionary."):
+ systems = NetAppESeriesProxySystems()
+
+ def test_discover_array_pass(self):
+        """Verify discover_array populates the systems list as expected."""
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ response = {"discoverProcessRunning": False, "storageSystems": [{"serialNumber": "1", "ipAddresses": ["192.168.1.5", "192.168.1.6"],
+ "supportedManagementPorts": ["https", "symbol"]},
+ {"serialNumber": "2", "ipAddresses": ["192.168.1.15", "192.168.1.16"],
+ "supportedManagementPorts": ["symbol"]},
+ {"serialNumber": "3", "ipAddresses": ["192.168.1.25", "192.168.1.26"],
+ "supportedManagementPorts": ["https", "symbol"]},
+ {"serialNumber": "4", "ipAddresses": ["192.168.1.35", "192.168.1.36"],
+ "supportedManagementPorts": ["symbol"]}]}
+ systems = NetAppESeriesProxySystems()
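+        # The request side_effect sequence simulates starting discovery, one in-progress poll, and the final results.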
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": "1"}), (200, {"discoverProcessRunning": True}), (200, response)]):
+ systems.discover_array()
+ self.assertEquals(systems.systems, [
+ {"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "192.168.1.36", "serial": "", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.35", "192.168.1.36"], "embedded_available": False, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "2", "serial": "2", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.15", "192.168.1.16"], "embedded_available": False, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}])
+
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24", "add_discovered_systems": True})
+ response = {"discoverProcessRunning": False, "storageSystems": [{"serialNumber": "1", "ipAddresses": ["192.168.1.5", "192.168.1.6"],
+ "supportedManagementPorts": ["https", "symbol"]},
+ {"serialNumber": "2", "ipAddresses": ["192.168.1.15", "192.168.1.16"],
+ "supportedManagementPorts": ["symbol"]},
+ {"serialNumber": "3", "ipAddresses": ["192.168.1.25", "192.168.1.26"],
+ "supportedManagementPorts": ["https", "symbol"]},
+ {"serialNumber": "4", "ipAddresses": ["192.168.1.35", "192.168.1.36"],
+ "supportedManagementPorts": ["symbol"]}]}
+ systems = NetAppESeriesProxySystems()
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": "1"}), (200, {"discoverProcessRunning": True}), (200, response)]):
+ systems.discover_array()
+ self.assertEquals(systems.systems, [
+ {"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "2", "serial": "2", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.15", "192.168.1.16"], "embedded_available": False, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "3", "serial": "3", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.25", "192.168.1.26"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "4", "serial": "4", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.35", "192.168.1.36"], "embedded_available": False, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}])
+
+ def test_discover_array_fail(self):
+ """Verify discover_array method throws expected exceptions."""
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24", "add_discovered_systems": True})
+ systems = NetAppESeriesProxySystems()
+ with self.assertRaisesRegex(AnsibleFailJson, "Failed to initiate array discovery."):
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ systems.discover_array()
+
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24", "add_discovered_systems": True})
+ systems = NetAppESeriesProxySystems()
+ with self.assertRaisesRegex(AnsibleFailJson, "Failed to get the discovery results."):
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": "1"}), Exception()]):
+ systems.discover_array()
+
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24", "add_discovered_systems": True})
+ systems = NetAppESeriesProxySystems()
+ with self.assertRaisesRegex(AnsibleFailJson, "Timeout waiting for array discovery process."):
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": "1"})] + [(200, {"discoverProcessRunning": True})] * 1000):
+ systems.discover_array()
+
+ def test_update_storage_systems_info_pass(self):
+ """Verify update_storage_systems_info method performs correctly."""
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.systems = [
+ {"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "192.168.1.36", "serial": "", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.35", "192.168.1.36"], "embedded_available": False, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "2", "serial": "2", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.15", "192.168.1.16"], "embedded_available": False, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}]
+
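+        # The proxy currently tracks systems "1" and "5": "5" is not desired so it is queued for removal,
+        # while "192.168.1.36" and "2" are desired but unknown to the proxy so they are queued to be added.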
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, [{"id": "1", "passwordStatus": "valid", "metaTags": []},
+ {"id": "5", "passwordStatus": "valid", "metaTags": []}])):
+ systems.update_storage_systems_info()
+            self.assertEqual(systems.systems_to_remove, ["5"])
+            self.assertEqual(systems.systems_to_add, [
+ {"ssid": "192.168.1.36", "serial": "", "password": "password", "password_valid": None, "password_set": None,
+ "stored_password_valid": None, "meta_tags": [], "controller_addresses": ["192.168.1.35", "192.168.1.36"], "embedded_available": False,
+ "accept_certificate": False, "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "2", "serial": "2", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.15", "192.168.1.16"], "embedded_available": False, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}])
+
+ def test_update_storage_systems_info_fail(self):
+ """Verify update_storage_systems_info throws expected exceptions."""
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.systems = [
+ {"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "192.168.1.36", "serial": "", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.35", "192.168.1.36"], "embedded_available": False, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "2", "serial": "2", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.15", "192.168.1.16"], "embedded_available": False, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}]
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve storage systems."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ systems.update_storage_systems_info()
+
+ def test_set_password_pass(self):
+ """Verify set_password completes as expected."""
+ system = {"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
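+        # A 200 validation response means the supplied password is already accepted, so password_set stays False.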
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self._REQUEST_FUNC, return_value=(200, None)):
+ systems.set_password(system)
+ self.assertFalse(system["password_set"])
+
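+        # A 401 validation response means the current password is rejected, so a password set is required.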
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self._REQUEST_FUNC, return_value=(401, None)):
+ systems.set_password(system)
+ self.assertTrue(system["password_set"])
+
+ def test_set_password_fail(self):
+ """Verify set_password throws expected exceptions."""
+ system = {"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self._REQUEST_FUNC, return_value=Exception()):
+ systems.set_password(system)
+ self.assertTrue(system["failed"])
+
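+        # The first request succeeds but the remaining requests raise, so the system is still flagged as failed.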
+ system = {"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self._REQUEST_FUNC, side_effect=[(200, None), Exception(), Exception(), Exception()]):
+ systems.set_password(system)
+ self.assertTrue(system["failed"])
+
+ def test_update_system_changes_pass(self):
+ """Verify system changes."""
+ system = {"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.update_system_changes(system)
+ self.assertEquals(system["changes"], {})
+
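+        # Stored management paths do not match the desired controller addresses, producing a controllerAddresses change.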
+ system = {"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {"managementPaths": ["192.168.1.25", "192.168.1.6"], "metaTags": [],
+ "controllers": [{"certificateStatus": "trusted"}, {"certificateStatus": "trusted"}]},
+ "changes": {}, "updated_required": False, "failed": False, "discovered": True}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.update_system_changes(system)
+ self.assertEquals(system["changes"], {"controllerAddresses": ["192.168.1.5", "192.168.1.6"]})
+
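+        # One controller certificate is not trusted and accept_certificate is desired, producing an acceptCertificate change.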
+ system = {"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {"managementPaths": ["192.168.1.5", "192.168.1.6"], "metaTags": [], "ip1": "192.168.1.5", "ip2": "192.168.1.6",
+ "controllers": [{"certificateStatus": "trusted"}, {"certificateStatus": "unknown"}]},
+ "changes": {}, "updated_required": False, "failed": False, "discovered": True}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.update_system_changes(system)
+ self.assertEquals(system["changes"], {"acceptCertificate": True})
+
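+        # Meta tags exist on the system but none are desired, producing a removeAllTags change.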
+ system = {"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {"managementPaths": ["192.168.1.5", "192.168.1.6"], "metaTags": [{"key": "key", "value": "1"}], "ip1": "192.168.1.5",
+ "ip2": "192.168.1.6",
+ "controllers": [{"certificateStatus": "trusted"}, {"certificateStatus": "trusted"}]},
+ "changes": {}, "updated_required": False, "failed": False, "discovered": True}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.update_system_changes(system)
+ self.assertEquals(system["changes"], {"removeAllTags": True})
+
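+        # Desired meta tags are not currently stored on the system, producing a metaTags change.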
+ system = {"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [{"key": "key", "value": "1"}], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True,
+ "accept_certificate": True,
+ "current_info": {"managementPaths": ["192.168.1.5", "192.168.1.6"], "metaTags": [], "ip1": "192.168.1.5", "ip2": "192.168.1.6",
+ "controllers": [{"certificateStatus": "trusted"}, {"certificateStatus": "trusted"}]},
+ "changes": {}, "updated_required": False, "failed": False, "discovered": True}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.update_system_changes(system)
+ self.assertEquals(system["changes"], {"metaTags": [{"key": "key", "value": "1"}]})
+
+ def test_add_system_pass(self):
+ """Validate add_system method."""
+ system = {"ssid": "1", "serial": "1", "password": "password", "meta_tags": [{"key": "key", "value": "1"}],
+ "controller_addresses": ["192.168.1.5", "192.168.1.6"], "accept_certificate": True}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.set_password = lambda x: None
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, None), (200, None)]):
+ systems.add_system(system)
+
+ system = {"ssid": "1", "serial": "1", "password": "password", "meta_tags": [],
+ "controller_addresses": ["192.168.1.5", "192.168.1.6"], "accept_certificate": False}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.set_password = lambda x: None
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, None), (200, None)]):
+ systems.add_system(system)
+
+ # Test warning situations, tests should still succeed
+ system = {"ssid": "1", "serial": "1", "password": "password", "meta_tags": [{"key": "key", "value": "1"}],
+ "controller_addresses": ["192.168.1.5", "192.168.1.6"], "accept_certificate": True}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.set_password = lambda x: None
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[Exception(), Exception()]):
+ systems.add_system(system)
+
+ system = {"ssid": "1", "serial": "1", "password": "password", "meta_tags": [{"key": "key", "value": "1"}],
+ "controller_addresses": ["192.168.1.5", "192.168.1.6"], "accept_certificate": True}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.set_password = lambda x: None
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, None), Exception()]):
+ systems.add_system(system)
+
+ def test_update_system_pass(self):
+ """Validate update_system method."""
+ system = {"ssid": "1", "changes": {}}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.set_password = lambda x: None
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, None)):
+ systems.update_system(system)
+
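+        # A failed update request must not raise; the test only verifies that update_system completes.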
+ system = {"ssid": "1", "changes": {}}
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.set_password = lambda x: None
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ systems.update_system(system)
+
+ def test_remove_system_pass(self):
+ """Validate remove_system method."""
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.set_password = lambda x: None
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, None)):
+ systems.remove_system("1")
+
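+        # A failed removal request must not raise; the test only verifies that remove_system completes.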
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24",
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.set_password = lambda x: None
+ with mock.patch(self.TIME_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ systems.remove_system("1")
+
+ def test_apply_pass(self):
+ """Validate apply method."""
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24", "add_discovered_systems": False,
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.is_embedded = lambda: False
+ systems.discover_array = lambda: None
+ systems.update_storage_systems_info = lambda: None
+ systems.update_system_changes = lambda x: None
+ systems.remove_system = lambda x: None
+ systems.add_system = lambda x: None
+ systems.update_system = lambda x: None
+ systems.systems = [{"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "192.168.1.36", "serial": "", "password": "password", "password_valid": None, "password_set": None,
+ "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.35", "192.168.1.36"], "embedded_available": False, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "2", "serial": "2", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.15", "192.168.1.16"], "embedded_available": False, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}]
+ systems.systems_to_remove = ["5"]
+ systems.systems_to_add = [{"ssid": "192.168.1.36", "serial": "", "password": "password", "password_valid": None, "password_set": None,
+ "stored_password_valid": None, "meta_tags": [], "controller_addresses": ["192.168.1.35", "192.168.1.36"],
+ "embedded_available": False,
+ "accept_certificate": False, "current_info": {}, "changes": {}, "updated_required": False, "failed": False,
+ "discovered": True},
+ {"ssid": "2", "serial": "2", "password": "password", "password_valid": None, "password_set": None,
+ "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.15", "192.168.1.16"], "embedded_available": False,
+ "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}]
+ systems.systems_to_update = [{"ssid": "192.168.1.36", "serial": "", "password": "password", "password_valid": None, "password_set": None,
+ "stored_password_valid": None, "meta_tags": [], "controller_addresses": ["192.168.1.35", "192.168.1.36"],
+ "embedded_available": False,
+ "accept_certificate": False, "current_info": {}, "changes": {}, "updated_required": False, "failed": False,
+ "discovered": True},
+ {"ssid": "2", "serial": "2", "password": "password", "password_valid": None, "password_set": None,
+ "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.15", "192.168.1.16"], "embedded_available": False,
+ "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}]
+ with self.assertRaisesRegexp(AnsibleExitJson, "systems added.*?systems updated.*?system removed"):
+ systems.apply()
+
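+        # The same pending changes combined with undiscovered systems cause apply to fail.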
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24", "add_discovered_systems": False,
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.is_embedded = lambda: False
+ systems.discover_array = lambda: None
+ systems.update_storage_systems_info = lambda: None
+ systems.update_system_changes = lambda x: None
+ systems.remove_system = lambda x: None
+ systems.add_system = lambda x: None
+ systems.update_system = lambda x: None
+ systems.systems = [{"ssid": "1", "serial": "1", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.5", "192.168.1.6"], "embedded_available": True, "accept_certificate": True,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "192.168.1.36", "serial": "", "password": "password", "password_valid": None, "password_set": None,
+ "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.35", "192.168.1.36"], "embedded_available": False, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True},
+ {"ssid": "2", "serial": "2", "password": "password", "password_valid": None, "password_set": None, "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.15", "192.168.1.16"], "embedded_available": False, "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}]
+ systems.systems_to_remove = ["5"]
+ systems.systems_to_add = [{"ssid": "192.168.1.36", "serial": "", "password": "password", "password_valid": None, "password_set": None,
+ "stored_password_valid": None, "meta_tags": [], "controller_addresses": ["192.168.1.35", "192.168.1.36"],
+ "embedded_available": False,
+ "accept_certificate": False, "current_info": {}, "changes": {}, "updated_required": False, "failed": False,
+ "discovered": True},
+ {"ssid": "2", "serial": "2", "password": "password", "password_valid": None, "password_set": None,
+ "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.15", "192.168.1.16"], "embedded_available": False,
+ "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}]
+ systems.systems_to_update = [{"ssid": "192.168.1.36", "serial": "", "password": "password", "password_valid": None, "password_set": None,
+ "stored_password_valid": None, "meta_tags": [], "controller_addresses": ["192.168.1.35", "192.168.1.36"],
+ "embedded_available": False,
+ "accept_certificate": False, "current_info": {}, "changes": {}, "updated_required": False, "failed": False,
+ "discovered": True},
+ {"ssid": "2", "serial": "2", "password": "password", "password_valid": None, "password_set": None,
+ "stored_password_valid": None,
+ "meta_tags": [], "controller_addresses": ["192.168.1.15", "192.168.1.16"], "embedded_available": False,
+ "accept_certificate": False,
+ "current_info": {}, "changes": {}, "updated_required": False, "failed": False, "discovered": True}]
+ systems.undiscovered_systems = ["5", "6"]
+ with self.assertRaises(AnsibleFailJson):
+ systems.apply()
+
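+        # With no systems and nothing pending, apply reports that no changes were made.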
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24", "add_discovered_systems": False,
+ "systems": []})
+ systems = NetAppESeriesProxySystems()
+ systems.is_embedded = lambda: False
+ systems.discover_array = lambda: None
+ systems.update_storage_systems_info = lambda: None
+ systems.update_system_changes = lambda x: None
+ systems.remove_system = lambda x: None
+ systems.add_system = lambda x: None
+ systems.systems = []
+ systems.systems_to_remove = []
+ systems.systems_to_add = []
+ systems.systems_to_update = []
+ with self.assertRaisesRegexp(AnsibleExitJson, "No changes were made."):
+ systems.apply()
+
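+        # Even with nothing to change, undiscovered systems still force a failure.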
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24", "add_discovered_systems": False,
+ "systems": []})
+ systems = NetAppESeriesProxySystems()
+ systems.is_embedded = lambda: False
+ systems.discover_array = lambda: None
+ systems.update_storage_systems_info = lambda: None
+ systems.update_system_changes = lambda x: None
+ systems.remove_system = lambda x: None
+ systems.add_system = lambda x: None
+ systems.systems = []
+ systems.systems_to_remove = []
+ systems.systems_to_add = []
+ systems.undiscovered_systems = ["5", "6"]
+ with self.assertRaises(AnsibleFailJson):
+ systems.apply()
+
+ def test_apply_fail(self):
+ """Validate apply method throws expected exceptions."""
+ self._set_args({"password": "password", "subnet_mask": "192.168.1.0/24", "add_discovered_systems": False,
+ "systems": [{"ssid": "1", "serial": "1"}, {"addresses": ["192.168.1.36"]}, {"serial": "2"}, {"serial": "5"}]})
+ systems = NetAppESeriesProxySystems()
+ systems.is_embedded = lambda: True
+ with self.assertRaisesRegexp(AnsibleFailJson, "Cannot add/remove storage systems to SANtricity Web Services Embedded instance."):
+ systems.apply()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_storagepool.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_storagepool.py
new file mode 100644
index 000000000..181e983ee
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_storagepool.py
@@ -0,0 +1,715 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_storagepool import NetAppESeriesStoragePool
+from units.compat.mock import patch, PropertyMock
+
+
+class StoragePoolTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"api_username": "username",
+ "api_password": "password",
+ "api_url": "http://localhost/devmgr/v2",
+ "ssid": "1",
+ "validate_certs": "no"}
+
+ STORAGE_POOL_DATA = [{"raidLevel": "raidDiskPool", "volumeGroupRef": "04000000600A098000A4B28D000017805C7BD4D8",
+ "securityType": "capable",
+ "protectionInformationCapabilities": {"protectionInformationCapable": True,
+ "protectionType": "type2Protection"},
+ "volumeGroupData": {"diskPoolData": {"reconstructionReservedDriveCount": 2}},
+ "totalRaidedSpace": "2735894167552", "name": "pool",
+ "id": "04000000600A098000A4B28D000017805C7BD4D8", "driveMediaType": "hdd"}]
+ DRIVES_DATA = [{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551ED1FF0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551EB1930000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551EAAE30000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551ECB1F0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551EB2930000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551ECB0B0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551EC6C70000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551E9BA70000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551ED7CF0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551ECB0F0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551E72870000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551E9DBB0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551EAC230000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551EA0BB0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551EAC4B0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551E7F2B0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551EC9270000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551EC97F0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551ECBFF0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551E9ED30000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551EA4CF0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551EA29F0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551ECDFB0000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'hdd', 'id': '010000005000C500551E99230000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'ssd', 'id': '010000005000C500551E9ED31000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'ssd', 'id': '010000005000C500551EA4CF2000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'ssd', 'id': '010000005000C500551EA29F3000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'ssd', 'id': '010000005000C500551ECDFB4000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'},
+ {'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
+ 'driveMediaType': 'ssd', 'id': '010000005000C500551E99235000000000000000', 'fdeCapable': True,
+ 'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
+ 'phyDriveType': 'sata', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
+ 'usableCapacity': '299463129088'}]
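+    # Volume-group candidate responses for RAID 6 groups containing 5 through 10 drives.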
+ RAID6_CANDIDATE_DRIVES = {"volumeCandidate": [
+ {"raidLevel": "raid6", "trayLossProtection": False, "rawSize": "898389368832", "usableSize": "898388459520",
+ "driveCount": 5, "freeExtentRef": "0000000000000000000000000000000000000000", "driveRefList": {
+ "driveRef": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551EC9270000000000000000",
+ "010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
+ "010000005000C500551E9ED30000000000000000"]}, "candidateSelectionType": "count",
+ "spindleSpeedMatch": True, "spindleSpeed": 10000, "phyDriveType": "sas", "dssPreallocEnabled": False,
+ "securityType": "capable", "drawerLossProtection": False, "driveMediaType": "hdd",
+ "protectionInformationCapable": False,
+ "protectionInformationCapabilities": {"protectionInformationCapable": True,
+ "protectionType": "type2Protection"},
+ "volumeCandidateData": {"type": "traditional", "diskPoolVolumeCandidateData": None},
+ "driveBlockFormat": "allNative", "allocateReservedSpace": False, "securityLevel": "fde"},
+ {"raidLevel": "raid6", "trayLossProtection": False, "rawSize": "1197852491776", "usableSize": "1197851279360",
+ "driveCount": 6, "freeExtentRef": "0000000000000000000000000000000000000000", "driveRefList": {
+ "driveRef": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551EC9270000000000000000",
+ "010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
+ "010000005000C500551E9ED30000000000000000", "010000005000C500551EA4CF0000000000000000"]},
+ "candidateSelectionType": "count", "spindleSpeedMatch": True, "spindleSpeed": 10000, "phyDriveType": "sas",
+ "dssPreallocEnabled": False, "securityType": "capable", "drawerLossProtection": False, "driveMediaType": "hdd",
+ "protectionInformationCapable": False,
+ "protectionInformationCapabilities": {"protectionInformationCapable": True,
+ "protectionType": "type2Protection"},
+ "volumeCandidateData": {"type": "traditional", "diskPoolVolumeCandidateData": None},
+ "driveBlockFormat": "allNative", "allocateReservedSpace": False, "securityLevel": "fde"},
+ {"raidLevel": "raid6", "trayLossProtection": False, "rawSize": "1497315614720", "usableSize": "1497314099200",
+ "driveCount": 7, "freeExtentRef": "0000000000000000000000000000000000000000", "driveRefList": {
+ "driveRef": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551EC9270000000000000000",
+ "010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
+ "010000005000C500551E9ED30000000000000000", "010000005000C500551EA4CF0000000000000000",
+ "010000005000C500551ED1FF0000000000000000"]}, "candidateSelectionType": "count",
+ "spindleSpeedMatch": True, "spindleSpeed": 10000, "phyDriveType": "sas", "dssPreallocEnabled": False,
+ "securityType": "capable", "drawerLossProtection": False, "driveMediaType": "hdd",
+ "protectionInformationCapable": False,
+ "protectionInformationCapabilities": {"protectionInformationCapable": True,
+ "protectionType": "type2Protection"},
+ "volumeCandidateData": {"type": "traditional", "diskPoolVolumeCandidateData": None},
+ "driveBlockFormat": "allNative", "allocateReservedSpace": False, "securityLevel": "fde"},
+ {"raidLevel": "raid6", "trayLossProtection": False, "rawSize": "1796778737664", "usableSize": "1796776919040",
+ "driveCount": 8, "freeExtentRef": "0000000000000000000000000000000000000000", "driveRefList": {
+ "driveRef": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551EC9270000000000000000",
+ "010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
+ "010000005000C500551E9ED30000000000000000", "010000005000C500551EA4CF0000000000000000",
+ "010000005000C500551ED1FF0000000000000000", "010000005000C500551EA29F0000000000000000"]},
+ "candidateSelectionType": "count", "spindleSpeedMatch": True, "spindleSpeed": 10000, "phyDriveType": "sas",
+ "dssPreallocEnabled": False, "securityType": "capable", "drawerLossProtection": False, "driveMediaType": "hdd",
+ "protectionInformationCapable": False,
+ "protectionInformationCapabilities": {"protectionInformationCapable": True,
+ "protectionType": "type2Protection"},
+ "volumeCandidateData": {"type": "traditional", "diskPoolVolumeCandidateData": None},
+ "driveBlockFormat": "allNative", "allocateReservedSpace": False, "securityLevel": "fde"},
+ {"raidLevel": "raid6", "trayLossProtection": False, "rawSize": "2096241860608", "usableSize": "2096239738880",
+ "driveCount": 9, "freeExtentRef": "0000000000000000000000000000000000000000", "driveRefList": {
+ "driveRef": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551EC9270000000000000000",
+ "010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
+ "010000005000C500551E9ED30000000000000000", "010000005000C500551EA4CF0000000000000000",
+ "010000005000C500551ED1FF0000000000000000", "010000005000C500551EA29F0000000000000000",
+ "010000005000C500551ECDFB0000000000000000"]}, "candidateSelectionType": "count",
+ "spindleSpeedMatch": True, "spindleSpeed": 10000, "phyDriveType": "sas", "dssPreallocEnabled": False,
+ "securityType": "capable", "drawerLossProtection": False, "driveMediaType": "hdd",
+ "protectionInformationCapable": False,
+ "protectionInformationCapabilities": {"protectionInformationCapable": True,
+ "protectionType": "type2Protection"},
+ "volumeCandidateData": {"type": "traditional", "diskPoolVolumeCandidateData": None},
+ "driveBlockFormat": "allNative", "allocateReservedSpace": False, "securityLevel": "fde"},
+ {"raidLevel": "raid6", "trayLossProtection": False, "rawSize": "2395704983552", "usableSize": "2395702558720",
+ "driveCount": 10, "freeExtentRef": "0000000000000000000000000000000000000000", "driveRefList": {
+ "driveRef": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551EC9270000000000000000",
+ "010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
+ "010000005000C500551E9ED30000000000000000", "010000005000C500551EA4CF0000000000000000",
+ "010000005000C500551ED1FF0000000000000000", "010000005000C500551EA29F0000000000000000",
+ "010000005000C500551ECDFB0000000000000000", "010000005000C500551E99230000000000000000"]},
+ "candidateSelectionType": "count", "spindleSpeedMatch": True, "spindleSpeed": 10000, "phyDriveType": "sas",
+ "dssPreallocEnabled": False, "securityType": "capable", "drawerLossProtection": False, "driveMediaType": "hdd",
+ "protectionInformationCapable": False,
+ "protectionInformationCapabilities": {"protectionInformationCapable": True,
+ "protectionType": "type2Protection"},
+ "volumeCandidateData": {"type": "traditional", "diskPoolVolumeCandidateData": None},
+ "driveBlockFormat": "allNative", "allocateReservedSpace": False, "securityLevel": "fde"}], "returnCode": "ok"}
+ EXPANSION_DDP_DRIVES_LIST = ["010000005000C500551ED1FF0000000000000000", "010000005000C500551E7F2B0000000000000000",
+ "010000005000C500551EC9270000000000000000", "010000005000C500551EC97F0000000000000000",
+ "010000005000C500551ECBFF0000000000000000", "010000005000C500551E9ED30000000000000000",
+ "010000005000C500551EA4CF0000000000000000", "010000005000C500551EA29F0000000000000000",
+ "010000005000C500551ECDFB0000000000000000", "010000005000C500551E99230000000000000000",
+ "010000005000C500551E9ED31000000000000000", "010000005000C500551EA4CF2000000000000000",
+ "010000005000C500551EA29F3000000000000000", "010000005000C500551ECDFB4000000000000000",
+ "010000005000C500551E99235000000000000000"]
+ EXPANSION_DDP_DRIVE_DATA = {"returnCode": "ok", "candidates": [
+ {"drives": ["010000005000C500551E7F2B0000000000000000"], "trayLossProtection": False, "wastedCapacity": "0",
+ "spindleSpeedMatch": True, "drawerLossProtection": False, "usableCapacity": "299463129088",
+ "driveBlockFormat": "allNative"},
+ {"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000"],
+ "trayLossProtection": False, "wastedCapacity": "0", "spindleSpeedMatch": True, "drawerLossProtection": False,
+ "usableCapacity": "598926258176", "driveBlockFormat": "allNative"},
+ {"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
+ "010000005000C500551E9ED30000000000000000"], "trayLossProtection": False, "wastedCapacity": "0",
+ "spindleSpeedMatch": True, "drawerLossProtection": False, "usableCapacity": "898389387264",
+ "driveBlockFormat": "allNative"},
+ {"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
+ "010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000"],
+ "trayLossProtection": False, "wastedCapacity": "0", "spindleSpeedMatch": True, "drawerLossProtection": False,
+ "usableCapacity": "1197852516352", "driveBlockFormat": "allNative"},
+ {"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
+ "010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000",
+ "010000005000C500551EA4CF0000000000000000"], "trayLossProtection": False, "wastedCapacity": "0",
+ "spindleSpeedMatch": True, "drawerLossProtection": False, "usableCapacity": "1497315645440",
+ "driveBlockFormat": "allNative"},
+ {"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
+ "010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000",
+ "010000005000C500551EA4CF0000000000000000", "010000005000C500551EC9270000000000000000"],
+ "trayLossProtection": False, "wastedCapacity": "0", "spindleSpeedMatch": True, "drawerLossProtection": False,
+ "usableCapacity": "1796778774528", "driveBlockFormat": "allNative"},
+ {"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
+ "010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000",
+ "010000005000C500551EA4CF0000000000000000", "010000005000C500551EC9270000000000000000",
+ "010000005000C500551EC97F0000000000000000"], "trayLossProtection": False, "wastedCapacity": "0",
+ "spindleSpeedMatch": True, "drawerLossProtection": False, "usableCapacity": "2096241903616",
+ "driveBlockFormat": "allNative"},
+ {"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
+ "010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000",
+ "010000005000C500551EA4CF0000000000000000", "010000005000C500551EC9270000000000000000",
+ "010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000"],
+ "trayLossProtection": False, "wastedCapacity": "0", "spindleSpeedMatch": True, "drawerLossProtection": False,
+ "usableCapacity": "2395705032704", "driveBlockFormat": "allNative"},
+ {"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
+ "010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000",
+ "010000005000C500551EA4CF0000000000000000", "010000005000C500551EC9270000000000000000",
+ "010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
+ "010000005000C500551ECDFB0000000000000000"], "trayLossProtection": False, "wastedCapacity": "0",
+ "spindleSpeedMatch": True, "drawerLossProtection": False, "usableCapacity": "2695168161792",
+ "driveBlockFormat": "allNative"},
+ {"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
+ "010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000",
+ "010000005000C500551EA4CF0000000000000000", "010000005000C500551EC9270000000000000000",
+ "010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
+ "010000005000C500551ECDFB0000000000000000", "010000005000C500551ED1FF0000000000000000"],
+ "trayLossProtection": False, "wastedCapacity": "0", "spindleSpeedMatch": True, "drawerLossProtection": False,
+ "usableCapacity": "2994631290880", "driveBlockFormat": "allNative"}]}
+
+ REQUEST_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_storagepool.request"
+ NETAPP_REQUEST_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.module_utils.santricity.NetAppESeriesModule.request"
+ DRIVES_PROPERTY = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_storagepool.NetAppESeriesStoragePool.drives"
+ STORAGE_POOL_PROPERTY = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_storagepool.NetAppESeriesStoragePool.storage_pool"
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def _initialize_dummy_instance(self, alt_args=None):
+ """Initialize a dummy instance of NetAppESeriesStoragePool for the purpose of testing individual methods."""
+ args = {"state": "absent", "name": "storage_pool"}
+ if alt_args:
+ args.update(alt_args)
+ self._set_args(args)
+ return NetAppESeriesStoragePool()
+
+ def test_drives_fail(self):
+ """Verify exception is thrown."""
+
+ with patch(self.NETAPP_REQUEST_FUNC) as netapp_request:
+ netapp_request.return_value = Exception()
+ storagepool = self._initialize_dummy_instance()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to fetch disk drives."):
+ drives = storagepool.drives
+
+ def test_available_drives(self):
+ """Verify all drives returned are available"""
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+
+ storagepool = self._initialize_dummy_instance()
+ self.assertEqual(storagepool.available_drives,
+ ['010000005000C500551ED1FF0000000000000000', '010000005000C500551E7F2B0000000000000000',
+ '010000005000C500551EC9270000000000000000', '010000005000C500551EC97F0000000000000000',
+ '010000005000C500551ECBFF0000000000000000', '010000005000C500551E9ED30000000000000000',
+ '010000005000C500551EA4CF0000000000000000', '010000005000C500551EA29F0000000000000000',
+ '010000005000C500551ECDFB0000000000000000', '010000005000C500551E99230000000000000000',
+ '010000005000C500551E9ED31000000000000000', '010000005000C500551EA4CF2000000000000000',
+ '010000005000C500551EA29F3000000000000000', '010000005000C500551ECDFB4000000000000000',
+ '010000005000C500551E99235000000000000000'])
+
+ def test_available_drive_types(self):
+ """Verify all drive types are returned in most common first order."""
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+
+ storagepool = self._initialize_dummy_instance()
+ self.assertEqual(storagepool.available_drive_types[0], "hdd")
+ self.assertEqual(storagepool.available_drive_types[1], "ssd")
+
+ def test_available_drive_interface_types(self):
+ """Verify all interface types are returned in most common first order."""
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+
+ storagepool = self._initialize_dummy_instance()
+ self.assertEqual(storagepool.available_drive_interface_types[0], "sas")
+ self.assertEqual(storagepool.available_drive_interface_types[1], "sata")
+
+ def test_storage_pool_drives(self):
+ """Verify storage pool drive collection."""
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+
+ storagepool = self._initialize_dummy_instance(
+ {"state": "present", "name": "pool", "criteria_drive_count": "12", "raid_level": "raidDiskPool"})
+ storagepool.pool_detail = self.STORAGE_POOL_DATA[0]
+ self.assertEqual(storagepool.storage_pool_drives, [
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551EB1930000000000000000'},
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551EAAE30000000000000000'},
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551ECB1F0000000000000000'},
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551EB2930000000000000000'},
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551ECB0B0000000000000000'},
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551EC6C70000000000000000'},
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551E9BA70000000000000000'},
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551ED7CF0000000000000000'},
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551ECB0F0000000000000000'},
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551E72870000000000000000'},
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551E9DBB0000000000000000'},
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551EAC230000000000000000'},
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551EA0BB0000000000000000'},
+ {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'}, 'fdeCapable': True,
+ 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
+ 'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
+ 'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
+ 'id': '010000005000C500551EAC4B0000000000000000'}])
+
+ def test_get_ddp_capacity(self):
+ """Evaluate returned capacity from get_ddp_capacity method."""
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+
+ storagepool = self._initialize_dummy_instance(
+ {"state": "present", "name": "pool", "criteria_drive_count": "12", "raid_level": "raidDiskPool"})
+ storagepool.pool_detail = self.STORAGE_POOL_DATA[0]
+ self.assertAlmostEqual(storagepool.get_ddp_capacity(self.EXPANSION_DDP_DRIVES_LIST), 6038680353645,
+ places=-2) # Allows for python version/architecture computational differences
+
+ def test_get_candidate_drives(self):
+ """Verify correct candidate list is returned."""
+ with patch(self.NETAPP_REQUEST_FUNC) as netapp_request:
+ netapp_request.return_value = (200, self.RAID6_CANDIDATE_DRIVES)
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+
+ storagepool = self._initialize_dummy_instance(
+ {"state": "present", "name": "raid6_vg", "criteria_drive_count": "6", "raid_level": "raid6"})
+ self.assertEqual(storagepool.get_candidate_drives(),
+ {'candidateSelectionType': 'count', 'driveMediaType': 'hdd',
+ 'protectionInformationCapabilities': {'protectionInformationCapable': True,
+ 'protectionType': 'type2Protection'},
+ 'dssPreallocEnabled': False, 'phyDriveType': 'sas', 'allocateReservedSpace': False,
+ 'trayLossProtection': False, 'raidLevel': 'raid6', 'spindleSpeed': 10000,
+ 'securityType': 'capable', 'securityLevel': 'fde', 'spindleSpeedMatch': True,
+ 'driveBlockFormat': 'allNative', 'protectionInformationCapable': False,
+ 'freeExtentRef': '0000000000000000000000000000000000000000', 'driveCount': 6,
+ 'driveRefList': {'driveRef': ['010000005000C500551E7F2B0000000000000000',
+ '010000005000C500551EC9270000000000000000',
+ '010000005000C500551EC97F0000000000000000',
+ '010000005000C500551ECBFF0000000000000000',
+ '010000005000C500551E9ED30000000000000000',
+ '010000005000C500551EA4CF0000000000000000']},
+ 'rawSize': '1197852491776', 'usableSize': '1197851279360',
+ 'drawerLossProtection': False,
+ 'volumeCandidateData': {'type': 'traditional', 'diskPoolVolumeCandidateData': None}})
+
+ def test_get_expansion_candidate_drives(self):
+ """Verify correct drive list is returned"""
+ with patch(self.NETAPP_REQUEST_FUNC) as netapp_request:
+ netapp_request.return_value = (200, self.EXPANSION_DDP_DRIVE_DATA)
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+
+ storagepool = self._initialize_dummy_instance(
+ {"state": "present", "name": "pool", "criteria_drive_count": "20", "raid_level": "raidDiskPool"})
+ storagepool.pool_detail = self.STORAGE_POOL_DATA[0]
+ self.assertEqual(storagepool.get_expansion_candidate_drives(), [
+ {'drawerLossProtection': False, 'trayLossProtection': False,
+ 'drives': ['010000005000C500551E7F2B0000000000000000', '010000005000C500551E99230000000000000000',
+ '010000005000C500551E9ED30000000000000000', '010000005000C500551EA29F0000000000000000',
+ '010000005000C500551EA4CF0000000000000000', '010000005000C500551EC9270000000000000000'],
+ 'spindleSpeedMatch': True, 'driveBlockFormat': 'allNative', 'usableCapacity': '1796778774528',
+ 'wastedCapacity': '0'}])
+
+ def test_get_maximum_reserve_drive_count(self):
+ """Ensure maximum reserve drive count is accurately calculated."""
+ with patch(self.NETAPP_REQUEST_FUNC) as netapp_request:
+ netapp_request.return_value = (200, self.EXPANSION_DDP_DRIVE_DATA)
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+
+ storagepool = self._initialize_dummy_instance(
+ {"state": "present", "name": "pool", "criteria_drive_count": "20", "raid_level": "raidDiskPool"})
+ storagepool.pool_detail = self.STORAGE_POOL_DATA[0]
+ self.assertEqual(storagepool.get_maximum_reserve_drive_count(), 5)
+
+ def test_apply_check_mode_unchange(self):
+ """Verify that the changes are appropriately determined."""
+ # A storage pool that does not exist and is requested absent requires no change
+ with self.assertRaisesRegexp(AnsibleExitJson, "'changed': False"):
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+ with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
+ storage_pool.return_value = {}
+ storagepool = self._initialize_dummy_instance(
+ {"state": "absent", "name": "not-a-pool", "erase_secured_drives": False,
+ "criteria_drive_count": "14", "raid_level": "raidDiskPool"})
+ storagepool.module.check_mode = True
+ storagepool.is_drive_count_valid = lambda x: True
+ storagepool.apply()
+
+ # Present storage pool with no changes
+ with self.assertRaisesRegexp(AnsibleExitJson, "'changed': False"):
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+ with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
+ storage_pool.return_value = self.STORAGE_POOL_DATA[0]
+ storagepool = self._initialize_dummy_instance(
+ {"state": "present", "name": "pool", "erase_secured_drives": False,
+ "criteria_drive_count": "14", "raid_level": "raidDiskPool"})
+ storagepool.module.check_mode = True
+ storagepool.is_drive_count_valid = lambda x: True
+ storagepool.apply()
+
+ def test_apply_check_mode_change(self):
+ """Verify that the changes are appropriately determined."""
+ # Remove an existing storage pool
+ with self.assertRaisesRegexp(AnsibleExitJson, "'changed': True"):
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+ with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
+ storage_pool.return_value = self.STORAGE_POOL_DATA[0]
+ storagepool = self._initialize_dummy_instance(
+ {"state": "absent", "name": "pool", "erase_secured_drives": False, "criteria_drive_count": "14",
+ "raid_level": "raidDiskPool"})
+ storagepool.module.check_mode = True
+ storagepool.is_drive_count_valid = lambda x: True
+ storagepool.apply()
+
+ # Expand present storage pool
+ with self.assertRaisesRegexp(AnsibleExitJson, "'changed': True"):
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+ with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
+ storage_pool.return_value = self.STORAGE_POOL_DATA[0]
+ storagepool = self._initialize_dummy_instance(
+ {"state": "present", "name": "pool", "erase_secured_drives": False,
+ "criteria_drive_count": "15", "raid_level": "raidDiskPool"})
+ storagepool.module.check_mode = True
+ storagepool.is_drive_count_valid = lambda x: True
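+ # Stub the subordinate change checks; only storage pool expansion reports a change in this case.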
+ storagepool.expand_storage_pool = lambda check_mode: (True, 100)
+ storagepool.migrate_raid_level = lambda check_mode: False
+ storagepool.secure_storage_pool = lambda check_mode: False
+ storagepool.set_reserve_drive_count = lambda check_mode: False
+ storagepool.apply()
+
+ # Migrate present storage pool raid level
+ with self.assertRaisesRegexp(AnsibleExitJson, "'changed': True"):
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+ with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
+ storage_pool.return_value = self.STORAGE_POOL_DATA[0]
+ storagepool = self._initialize_dummy_instance(
+ {"state": "present", "name": "pool", "erase_secured_drives": False,
+ "criteria_drive_count": "15", "raid_level": "raidDiskPool"})
+ storagepool.module.check_mode = True
+ storagepool.is_drive_count_valid = lambda x: True
+ storagepool.expand_storage_pool = lambda check_mode: (False, 0)
+ storagepool.migrate_raid_level = lambda check_mode: True
+ storagepool.secure_storage_pool = lambda check_mode: False
+ storagepool.set_reserve_drive_count = lambda check_mode: False
+ storagepool.apply()
+
+ # Secure present storage pool
+ with self.assertRaisesRegexp(AnsibleExitJson, "'changed': True"):
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+ with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
+ storage_pool.return_value = self.STORAGE_POOL_DATA[0]
+ storagepool = self._initialize_dummy_instance(
+ {"state": "present", "name": "pool", "erase_secured_drives": False,
+ "criteria_drive_count": "15", "raid_level": "raidDiskPool"})
+ storagepool.module.check_mode = True
+ storagepool.is_drive_count_valid = lambda x: True
+ storagepool.expand_storage_pool = lambda check_mode: (False, 0)
+ storagepool.migrate_raid_level = lambda check_mode: False
+ storagepool.secure_storage_pool = lambda check_mode: True
+ storagepool.set_reserve_drive_count = lambda check_mode: False
+ storagepool.apply()
+
+ # Change present storage pool reserve drive count
+ with self.assertRaisesRegexp(AnsibleExitJson, "'changed': True"):
+ with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
+ drives.return_value = self.DRIVES_DATA
+ with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
+ storage_pool.return_value = self.STORAGE_POOL_DATA[0]
+ storagepool = self._initialize_dummy_instance(
+ {"state": "present", "name": "pool", "erase_secured_drives": False,
+ "criteria_drive_count": "15", "raid_level": "raidDiskPool"})
+ storagepool.module.check_mode = True
+ storagepool.is_drive_count_valid = lambda x: True
+ storagepool.expand_storage_pool = lambda check_mode: (False, 0)
+ storagepool.migrate_raid_level = lambda check_mode: False
+ storagepool.secure_storage_pool = lambda check_mode: False
+ storagepool.set_reserve_drive_count = lambda check_mode: True
+ storagepool.apply()
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_syslog.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_syslog.py
new file mode 100644
index 000000000..b36278bfe
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_syslog.py
@@ -0,0 +1,128 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_syslog import NetAppESeriesSyslog
+from units.modules.utils import AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class SyslogTest(ModuleTestCase):
+ REQUIRED_PARAMS = {
+ "api_username": "rw",
+ "api_password": "password",
+ "api_url": "http://localhost",
+ }
+ REQ_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_syslog.NetAppESeriesSyslog.request'
+ BASE_REQ_FUNC = 'ansible_collections.netapp_eseries.santricity.plugins.module_utils.santricity.request'
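+ # REQ_FUNC intercepts the module's own requests; BASE_REQ_FUNC intercepts the base-class requests issued during instantiation.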
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_test_configuration_fail(self):
+ """Validate test_configuration fails when request exception is thrown."""
+ initial = {"state": "present",
+ "ssid": "1",
+ "address": "192.168.1.1",
+ "port": "514",
+ "protocol": "udp",
+ "components": ["auditLog"]}
+ self._set_args(initial)
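+ # Instantiation queries the firmware version and proxy status, so both responses are mocked.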
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesSyslog()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, r"We failed to send test message!"):
+ with mock.patch(self.REQ_FUNC, return_value=Exception()):
+ syslog.test_configuration(self.REQUIRED_PARAMS)
+
+ def test_update_configuration_record_match_pass(self):
+ """Verify existing syslog server record match does not issue update request."""
+ initial = {"state": "present",
+ "ssid": "1",
+ "address": "192.168.1.1",
+ "port": "514",
+ "protocol": "udp",
+ "components": ["auditLog"]}
+ expected = [{"id": "123456",
+ "serverAddress": "192.168.1.1",
+ "port": 514,
+ "protocol": "udp",
+ "components": [{"type": "auditLog"}]}]
+
+ self._set_args(initial)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesSyslog()
+
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, expected), (200, None)]):
+ updated = syslog.update_configuration()
+ self.assertFalse(updated)
+
+ def test_update_configuration_record_partial_match_pass(self):
+ """Verify existing syslog server record partial match results in an update request."""
+ initial = {"state": "present",
+ "ssid": "1",
+ "address": "192.168.1.1",
+ "port": "514",
+ "protocol": "tcp",
+ "components": ["auditLog"]}
+ expected = [{"id": "123456",
+ "serverAddress": "192.168.1.1",
+ "port": 514,
+ "protocol": "udp",
+ "components": [{"type": "auditLog"}]}]
+
+ self._set_args(initial)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesSyslog()
+
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, expected), (200, None)]):
+ updated = syslog.update_configuration()
+ self.assertTrue(updated)
+
+ def test_update_configuration_record_no_match_pass(self):
+ """Verify existing syslog server record partial match results in an update request."""
+ initial = {"state": "present",
+ "ssid": "1",
+ "address": "192.168.1.1",
+ "port": "514",
+ "protocol": "tcp",
+ "components": ["auditLog"]}
+ expected = [{"id": "123456",
+ "serverAddress": "192.168.1.100",
+ "port": 514,
+ "protocol": "udp",
+ "components": [{"type": "auditLog"}]}]
+
+ self._set_args(initial)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesSyslog()
+
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, expected), (200, dict(id=1234))]):
+ updated = syslog.update_configuration()
+ self.assertTrue(updated)
+
+ def test_update_configuration_record_no_match_defaults_pass(self):
+ """Verify existing syslog server record partial match results in an update request."""
+ initial = {"state": "present",
+ "ssid": "1",
+ "address": "192.168.1.1",
+ "port": "514",
+ "protocol": "tcp",
+ "components": ["auditLog"]}
+ expected = [{"id": "123456",
+ "serverAddress": "192.168.1.100",
+ "port": 514,
+ "protocol": "udp",
+ "components": [{"type": "auditLog"}]}]
+
+ self._set_args(initial)
+ with mock.patch(self.BASE_REQ_FUNC, side_effect=[(200, {"version": "04.00.00.00"}), (200, {"runningAsProxy": False})]):
+ syslog = NetAppESeriesSyslog()
+
+ with mock.patch(self.REQ_FUNC, side_effect=[(200, expected), (200, dict(id=1234))]):
+ updated = syslog.update_configuration()
+ self.assertTrue(updated)
diff --git a/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_volume.py b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_volume.py
new file mode 100644
index 000000000..4bf547b38
--- /dev/null
+++ b/ansible_collections/netapp_eseries/santricity/tests/unit/modules/test_na_santricity_volume.py
@@ -0,0 +1,864 @@
+# (c) 2020, NetApp, Inc
+# BSD-3 Clause (see COPYING or https://opensource.org/licenses/BSD-3-Clause)
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_volume import NetAppESeriesVolume
+from units.modules.utils import AnsibleFailJson, ModuleTestCase, set_module_args
+from units.compat import mock
+
+
+class NetAppESeriesVolumeTest(ModuleTestCase):
+ REQUIRED_PARAMS = {"api_username": "username",
+ "api_password": "password",
+ "api_url": "http://localhost/devmgr/v2",
+ "ssid": "1",
+ "validate_certs": "no"}
+
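+ # Canned web services responses used as mocked request return values throughout these tests.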
+ THIN_VOLUME_RESPONSE = [{"capacity": "1288490188800",
+ "volumeRef": "3A000000600A098000A4B28D000010475C405428",
+ "status": "optimal",
+ "protectionType": "type1Protection",
+ "maxVirtualCapacity": "281474976710656",
+ "initialProvisionedCapacity": "4294967296",
+ "currentProvisionedCapacity": "4294967296",
+ "provisionedCapacityQuota": "1305670057984",
+ "growthAlertThreshold": 85,
+ "expansionPolicy": "automatic",
+ "flashCached": False,
+ "metadata": [{"key": "workloadId", "value": "4200000001000000000000000000000000000000"},
+ {"key": "volumeTypeId", "value": "volume"}],
+ "dataAssurance": True,
+ "segmentSize": 131072,
+ "diskPool": True,
+ "listOfMappings": [],
+ "mapped": False,
+ "currentControllerId": "070000000000000000000001",
+ "cacheSettings": {"readCacheEnable": True, "writeCacheEnable": True,
+ "readAheadMultiplier": 0},
+ "name": "thin_volume",
+ "id": "3A000000600A098000A4B28D000010475C405428"}]
+ VOLUME_GET_RESPONSE = [{"offline": False,
+ "raidLevel": "raid6",
+ "capacity": "214748364800",
+ "reconPriority": 1,
+ "segmentSize": 131072,
+ "volumeRef": "02000000600A098000A4B9D100000F095C2F7F31",
+ "status": "optimal",
+ "protectionInformationCapable": False,
+ "protectionType": "type0Protection",
+ "diskPool": True,
+ "flashCached": False,
+ "metadata": [{"key": "workloadId", "value": "4200000002000000000000000000000000000000"},
+ {"key": "volumeTypeId", "value": "Clare"}],
+ "dataAssurance": False,
+ "currentControllerId": "070000000000000000000002",
+ "cacheSettings": {"readCacheEnable": True, "writeCacheEnable": False,
+ "readAheadMultiplier": 0},
+ "thinProvisioned": False,
+ "totalSizeInBytes": "214748364800",
+ "name": "Matthew",
+ "id": "02000000600A098000A4B9D100000F095C2F7F31"},
+ {"offline": False,
+ "raidLevel": "raid6",
+ "capacity": "107374182400",
+ "reconPriority": 1,
+ "segmentSize": 131072,
+ "volumeRef": "02000000600A098000A4B28D00000FBE5C2F7F26",
+ "status": "optimal",
+ "protectionInformationCapable": False,
+ "protectionType": "type0Protection",
+ "diskPool": True,
+ "flashCached": False,
+ "metadata": [{"key": "workloadId", "value": "4200000002000000000000000000000000000000"},
+ {"key": "volumeTypeId", "value": "Samantha"}],
+ "dataAssurance": False,
+ "currentControllerId": "070000000000000000000001",
+ "cacheSettings": {"readCacheEnable": True, "writeCacheEnable": False,
+ "readAheadMultiplier": 0},
+ "thinProvisioned": False,
+ "totalSizeInBytes": "107374182400",
+ "name": "Samantha",
+ "id": "02000000600A098000A4B28D00000FBE5C2F7F26"},
+ {"offline": False,
+ "raidLevel": "raid6",
+ "capacity": "107374182400",
+ "segmentSize": 131072,
+ "volumeRef": "02000000600A098000A4B9D100000F0B5C2F7F40",
+ "status": "optimal",
+ "protectionInformationCapable": False,
+ "protectionType": "type0Protection",
+ "volumeGroupRef": "04000000600A098000A4B9D100000F085C2F7F26",
+ "diskPool": True,
+ "flashCached": False,
+ "metadata": [{"key": "workloadId", "value": "4200000002000000000000000000000000000000"},
+ {"key": "volumeTypeId", "value": "Micah"}],
+ "dataAssurance": False,
+ "currentControllerId": "070000000000000000000002",
+ "cacheSettings": {"readCacheEnable": True, "writeCacheEnable": False,
+ "readAheadMultiplier": 0},
+ "thinProvisioned": False,
+ "totalSizeInBytes": "107374182400",
+ "name": "Micah",
+ "id": "02000000600A098000A4B9D100000F0B5C2F7F40"}]
+ STORAGE_POOL_GET_RESPONSE = [{"offline": False,
+ "raidLevel": "raidDiskPool",
+ "volumeGroupRef": "04000000600A",
+ "securityType": "capable",
+ "protectionInformationCapable": False,
+ "protectionInformationCapabilities": {"protectionInformationCapable": True,
+ "protectionType": "type2Protection"},
+ "volumeGroupData": {"type": "diskPool",
+ "diskPoolData": {"reconstructionReservedDriveCount": 1,
+ "reconstructionReservedAmt": "296889614336",
+ "reconstructionReservedDriveCountCurrent": 1,
+ "poolUtilizationWarningThreshold": 0,
+ "poolUtilizationCriticalThreshold": 85,
+ "poolUtilizationState": "utilizationOptimal",
+ "unusableCapacity": "0",
+ "degradedReconstructPriority": "high",
+ "criticalReconstructPriority": "highest",
+ "backgroundOperationPriority": "low",
+ "allocGranularity": "4294967296"}},
+ "reservedSpaceAllocated": False,
+ "securityLevel": "fde",
+ "usedSpace": "863288426496",
+ "totalRaidedSpace": "2276332666880",
+ "raidStatus": "optimal",
+ "freeSpace": "1413044240384",
+ "drivePhysicalType": "sas",
+ "driveMediaType": "hdd",
+ "diskPool": True,
+ "id": "04000000600A098000A4B9D100000F085C2F7F26",
+ "name": "employee_data_storage_pool"},
+ {"offline": False,
+ "raidLevel": "raid1",
+ "volumeGroupRef": "04000000600A098000A4B28D00000FBD5C2F7F19",
+ "state": "complete",
+ "securityType": "capable",
+ "drawerLossProtection": False,
+ "protectionInformationCapable": False,
+ "protectionInformationCapabilities": {"protectionInformationCapable": True,
+ "protectionType": "type2Protection"},
+ "volumeGroupData": {"type": "unknown", "diskPoolData": None},
+ "reservedSpaceAllocated": False,
+ "securityLevel": "fde",
+ "usedSpace": "322122547200",
+ "totalRaidedSpace": "598926258176",
+ "raidStatus": "optimal",
+ "freeSpace": "276803710976",
+ "drivePhysicalType": "sas",
+ "driveMediaType": "hdd",
+ "diskPool": False,
+ "id": "04000000600A098000A4B28D00000FBD5C2F7F19",
+ "name": "database_storage_pool"}]
+
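+ # Successive long-lived operation snapshots; later entries report the volume initializations as complete.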
+ GET_LONG_LIVED_OPERATION_RESPONSE = [
+ {"returnCode": "ok",
+ "longLivedOpsProgress": [
+ {"volAction": "initializing", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
+ "init": {"volumeRef": "02000000600A098000A4B9D1000037315D494C6F", "pending": False, "percentComplete": 1, "timeToCompletion": 20},
+ "format": None, "volCreation": None, "volDeletion": None},
+ {"volAction": "initializing", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
+ "init": {"volumeRef": "02000000600A098000A4B28D00003D2C5D494C87", "pending": False, "percentComplete": 0, "timeToCompletion": 18},
+ "volCreation": None, "volDeletion": None}]},
+ {"returnCode": "ok",
+ "longLivedOpsProgress": [
+ {"volAction": "complete", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
+ "init": {"volumeRef": "02000000600A098000A4B9D1000037315D494C6F", "pending": False, "percentComplete": 1, "timeToCompletion": 20},
+ "format": None, "volCreation": None, "volDeletion": None},
+ {"volAction": "initializing", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
+ "init": {"volumeRef": "02000000600A098000A4B28D00003D2C5D494C87", "pending": False, "percentComplete": 0, "timeToCompletion": 18},
+ "volCreation": None, "volDeletion": None}]},
+ {"returnCode": "ok",
+ "longLivedOpsProgress": [
+ {"volAction": "initializing", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
+ "init": {"volumeRef": "02000000600A098000A4B9D1000037315D494C6F", "pending": False, "percentComplete": 1, "timeToCompletion": 20},
+ "format": None, "volCreation": None, "volDeletion": None},
+ {"volAction": "complete", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
+ "init": {"volumeRef": "02000000600A098000A4B28D00003D2C5D494C87", "pending": False, "percentComplete": 0, "timeToCompletion": 18},
+ "volCreation": None, "volDeletion": None}]},
+ {"returnCode": "ok",
+ "longLivedOpsProgress": [
+ {"volAction": "complete", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
+ "init": {"volumeRef": "02000000600A098000A4B9D1000037315D494C6F", "pending": False, "percentComplete": 1, "timeToCompletion": 20},
+ "format": None, "volCreation": None, "volDeletion": None},
+ {"volAction": "complete", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
+ "init": {"volumeRef": "02000000600A098000A4B28D00003D2C5D494C87", "pending": False, "percentComplete": 0, "timeToCompletion": 18},
+ "volCreation": None, "volDeletion": None}]}]
+
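+ # Existing workload tag definitions returned by the mocked workloads request.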
+ WORKLOAD_GET_RESPONSE = [{"id": "4200000001000000000000000000000000000000", "name": "general_workload_1",
+ "workloadAttributes": [{"key": "profileId", "value": "Other_1"}]},
+ {"id": "4200000002000000000000000000000000000000", "name": "employee_data",
+ "workloadAttributes": [{"key": "use", "value": "EmployeeData"},
+ {"key": "location", "value": "ICT"},
+ {"key": "private", "value": "public"},
+ {"key": "profileId", "value": "ansible_workload_1"}]},
+ {"id": "4200000003000000000000000000000000000000", "name": "customer_database",
+ "workloadAttributes": [{"key": "use", "value": "customer_information"},
+ {"key": "location", "value": "global"},
+ {"key": "profileId", "value": "ansible_workload_2"}]},
+ {"id": "4200000004000000000000000000000000000000", "name": "product_database",
+ "workloadAttributes": [{"key": "use", "value": "production_information"},
+ {"key": "security", "value": "private"},
+ {"key": "location", "value": "global"},
+ {"key": "profileId", "value": "ansible_workload_4"}]}]
+
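+ # Patch targets for mocking web services requests, volume lookups, and sleep delays.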
+ REQUEST_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_volume.NetAppESeriesVolume.request"
+ GET_VOLUME_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_volume.NetAppESeriesVolume.get_volume"
+ SLEEP_FUNC = "ansible_collections.netapp_eseries.santricity.plugins.modules.na_santricity_volume.time.sleep"
+
+ def _set_args(self, args=None):
+ module_args = self.REQUIRED_PARAMS.copy()
+ if args is not None:
+ module_args.update(args)
+ set_module_args(module_args)
+
+ def test_module_arguments_pass(self):
+ """Ensure valid arguments successful create a class instance."""
+ arg_sets = [{"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "tb",
+ "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 10},
+ {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "gb",
+ "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 1024,
+ "thin_volume_growth_alert_threshold": 99},
+ {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "gb",
+ "thin_provision": True, "thin_volume_repo_size": 64},
+ {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "kb",
+ "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 67108864}]
+
+ # validate size normalization
+ for arg_set in arg_sets:
+ self._set_args(arg_set)
+ volume_object = NetAppESeriesVolume()
+
+ self.assertEqual(volume_object.size_b, volume_object.convert_to_aligned_bytes(arg_set["size"]))
+ self.assertEqual(volume_object.thin_volume_repo_size_b, volume_object.convert_to_aligned_bytes(arg_set["thin_volume_repo_size"]))
+ self.assertEqual(volume_object.thin_volume_expansion_policy, "automatic")
+ if "thin_volume_max_repo_size" not in arg_set.keys():
+ self.assertEqual(volume_object.thin_volume_max_repo_size_b, volume_object.convert_to_aligned_bytes(arg_set["size"]))
+ else:
+ self.assertEqual(volume_object.thin_volume_max_repo_size_b,
+ volume_object.convert_to_aligned_bytes(arg_set["thin_volume_max_repo_size"]))
+
+ # validate metadata form
+ self._set_args(
+ {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 10, "workload_name": "workload1",
+ "metadata": {"availability": "public", "security": "low"}})
+ volume_object = NetAppESeriesVolume()
+ for entry in volume_object.metadata:
+ self.assertTrue(entry in [{'value': 'low', 'key': 'security'}, {'value': 'public', 'key': 'availability'}])
+
+ def test_module_arguments_fail(self):
+ """Ensure invalid arguments values do not create a class instance."""
+ arg_sets = [{"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "tb",
+ "thin_provision": True, "thin_volume_repo_size": 260},
+ {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 10000, "size_unit": "tb",
+ "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 10},
+ {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 10000, "size_unit": "gb",
+ "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 9},
+ {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 10000, "size_unit": "gb",
+ "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 100}]
+
+ for arg_set in arg_sets:
+ with self.assertRaises(AnsibleFailJson):
+ self._set_args(arg_set)
+ print(arg_set)
+ volume_object = NetAppESeriesVolume()
+
+ def test_get_volume_pass(self):
+ """Evaluate the get_volume method."""
+ with mock.patch(self.REQUEST_FUNC,
+ side_effect=[(200, self.VOLUME_GET_RESPONSE), (200, self.THIN_VOLUME_RESPONSE)]):
+ self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100})
+ volume_object = NetAppESeriesVolume()
+ self.assertEqual(volume_object.get_volume(),
+ [entry for entry in self.VOLUME_GET_RESPONSE if entry["name"] == "Matthew"][0])
+
+ with mock.patch(self.REQUEST_FUNC,
+ side_effect=[(200, self.VOLUME_GET_RESPONSE), (200, self.THIN_VOLUME_RESPONSE)]):
+ self._set_args({"state": "present", "name": "NotAVolume", "storage_pool_name": "pool", "size": 100})
+ volume_object = NetAppESeriesVolume()
+ self.assertEqual(volume_object.get_volume(), {})
+
+ def test_get_volume_fail(self):
+ """Evaluate the get_volume exception paths."""
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to obtain list of thick volumes."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100})
+ volume_object = NetAppESeriesVolume()
+ volume_object.get_volume()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to obtain list of thin volumes."):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, self.VOLUME_GET_RESPONSE), Exception()]):
+ self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100})
+ volume_object = NetAppESeriesVolume()
+ volume_object.get_volume()
+
+ def tests_wait_for_volume_availability_pass(self):
+ """Ensure wait_for_volume_availability completes as expected."""
+ self._set_args({"state": "present", "name": "NewVolume", "storage_pool_name": "employee_data_storage_pool", "size": 100,
+ "wait_for_initialization": True})
+ volume_object = NetAppESeriesVolume()
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ with mock.patch(self.GET_VOLUME_FUNC, side_effect=[False, False, True]):
+ volume_object.wait_for_volume_availability()
+
+ def tests_wait_for_volume_availability_fail(self):
+ """Ensure wait_for_volume_availability throws the expected exceptions."""
+ self._set_args({"state": "present", "name": "NewVolume", "storage_pool_name": "employee_data_storage_pool", "size": 100,
+ "wait_for_initialization": True})
+ volume_object = NetAppESeriesVolume()
+ volume_object.get_volume = lambda: False
+ with self.assertRaisesRegexp(AnsibleFailJson, "Timed out waiting for the volume"):
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ volume_object.wait_for_volume_availability()
+
+ def tests_wait_for_volume_action_pass(self):
+ """Ensure wait_for_volume_action completes as expected."""
+ self._set_args({"state": "present", "name": "NewVolume", "storage_pool_name": "employee_data_storage_pool", "size": 100,
+ "wait_for_initialization": True})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"id": "02000000600A098000A4B9D1000037315D494C6F",
+ "storageVolumeRef": "02000000600A098000A4B9D1000037315DXXXXXX"}
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, self.GET_LONG_LIVED_OPERATION_RESPONSE[0]),
+ (200, self.GET_LONG_LIVED_OPERATION_RESPONSE[1]),
+ (200, self.GET_LONG_LIVED_OPERATION_RESPONSE[2]),
+ (200, self.GET_LONG_LIVED_OPERATION_RESPONSE[3])]):
+ volume_object.wait_for_volume_action()
+
+ self._set_args({"state": "present", "name": "NewVolume", "storage_pool_name": "employee_data_storage_pool", "size": 100,
+ "wait_for_initialization": True})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"id": "02000000600A098000A4B9D1000037315DXXXXXX",
+ "storageVolumeRef": "02000000600A098000A4B9D1000037315D494C6F"}
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, self.GET_LONG_LIVED_OPERATION_RESPONSE[0]),
+ (200, self.GET_LONG_LIVED_OPERATION_RESPONSE[1]),
+ (200, self.GET_LONG_LIVED_OPERATION_RESPONSE[2]),
+ (200, self.GET_LONG_LIVED_OPERATION_RESPONSE[3])]):
+ volume_object.wait_for_volume_action()
+
+ def tests_wait_for_volume_action_fail(self):
+ """Ensure wait_for_volume_action throws the expected exceptions."""
+ self._set_args({"state": "present", "name": "NewVolume", "storage_pool_name": "employee_data_storage_pool", "size": 100,
+ "wait_for_initialization": True})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"id": "02000000600A098000A4B9D1000037315DXXXXXX",
+ "storageVolumeRef": "02000000600A098000A4B9D1000037315D494C6F"}
+ with mock.patch(self.SLEEP_FUNC, return_value=None):
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to get volume expansion progress."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ volume_object.wait_for_volume_action()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Expansion action failed to complete."):
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, self.GET_LONG_LIVED_OPERATION_RESPONSE[0])):
+ volume_object.wait_for_volume_action(timeout=300)
+
+ def test_get_storage_pool_pass(self):
+ """Evaluate the get_storage_pool method."""
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, self.STORAGE_POOL_GET_RESPONSE)):
+ self._set_args({"state": "present", "name": "NewVolume", "storage_pool_name": "employee_data_storage_pool",
+ "size": 100})
+ volume_object = NetAppESeriesVolume()
+ self.assertEqual(volume_object.get_storage_pool(), [entry for entry in self.STORAGE_POOL_GET_RESPONSE if
+ entry["name"] == "employee_data_storage_pool"][0])
+
+ self._set_args(
+ {"state": "present", "name": "NewVolume", "storage_pool_name": "NotAStoragePool", "size": 100})
+ volume_object = NetAppESeriesVolume()
+ self.assertEqual(volume_object.get_storage_pool(), {})
+
+ def test_get_storage_pool_fail(self):
+ """Evaluate the get_storage_pool exception paths."""
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to obtain list of storage pools."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100})
+ volume_object = NetAppESeriesVolume()
+ volume_object.get_storage_pool()
+
+ def test_check_storage_pool_sufficiency_pass(self):
+ """Ensure passing logic."""
+ self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100})
+ volume_object = NetAppESeriesVolume()
+ volume_object.pool_detail = [entry for entry in self.STORAGE_POOL_GET_RESPONSE
+ if entry["name"] == "employee_data_storage_pool"][0]
+ volume_object.check_storage_pool_sufficiency()
+
+ def test_check_storage_pool_sufficiency_fail(self):
+ """Validate exceptions are thrown for insufficient storage pool resources."""
+ self._set_args({"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "tb",
+ "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 10})
+ volume_object = NetAppESeriesVolume()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "Requested storage pool"):
+ volume_object.check_storage_pool_sufficiency()
+
+ with self.assertRaisesRegexp(AnsibleFailJson,
+ "Thin provisioned volumes can only be created on raid disk pools."):
+ volume_object.pool_detail = [entry for entry in self.STORAGE_POOL_GET_RESPONSE
+ if entry["name"] == "database_storage_pool"][0]
+ volume_object.volume_detail = {}
+ volume_object.check_storage_pool_sufficiency()
+
+ with self.assertRaisesRegexp(AnsibleFailJson, "requires the storage pool to be DA-compatible."):
+ volume_object.pool_detail = {"diskPool": True,
+ "protectionInformationCapabilities": {"protectionType": "type0Protection",
+ "protectionInformationCapable": False}}
+ volume_object.volume_detail = {}
+ volume_object.data_assurance_enabled = True
+ volume_object.check_storage_pool_sufficiency()
+
+ volume_object.pool_detail = {"diskPool": True,
+ "protectionInformationCapabilities": {"protectionType": "type2Protection",
+ "protectionInformationCapable": True}}
+ volume_object.check_storage_pool_sufficiency()
+
+ self._set_args({"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "tb",
+ "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ with self.assertRaisesRegexp(AnsibleFailJson,
+ "Not enough storage pool free space available for the volume's needs."):
+ volume_object.pool_detail = {"freeSpace": 10, "diskPool": True,
+ "protectionInformationCapabilities": {"protectionType": "type2Protection",
+ "protectionInformationCapable": True}}
+ volume_object.volume_detail = {"totalSizeInBytes": 100}
+ volume_object.data_assurance_enabled = True
+ volume_object.size_b = 1
+ volume_object.check_storage_pool_sufficiency()
+
+ def test_update_workload_tags_pass(self):
+ """Validate updating workload tags."""
+ test_sets = [[{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100}, False],
+ [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
+ "workload_name": "employee_data"}, False],
+ [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
+ "workload_name": "customer_database",
+ "metadata": {"use": "customer_information", "location": "global"}}, False],
+ [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
+ "workload_name": "customer_database",
+ "metadata": {"use": "customer_information"}}, True],
+ [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
+ "workload_name": "customer_database",
+ "metadata": {"use": "customer_information", "location": "local"}}, True],
+ [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
+ "workload_name": "customer_database",
+ "metadata": {"use": "customer_information", "location": "global", "importance": "no"}}, True],
+ [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
+ "workload_name": "newWorkload",
+ "metadata": {"for_testing": "yes"}}, True],
+ [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
+ "workload_name": "newWorkload"}, True]]
+
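+ # Each test set pairs module arguments with the expected return value of update_workload_tags().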
+ for test in test_sets:
+ self._set_args(test[0])
+ volume_object = NetAppESeriesVolume()
+
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, self.WORKLOAD_GET_RESPONSE), (200, {"id": 1})]):
+ self.assertEqual(volume_object.update_workload_tags(), test[1])
+
+ def test_update_workload_tags_fail(self):
+ """Validate updating workload tags fails appropriately."""
+ self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
+ "workload_name": "employee_data"})
+ volume_object = NetAppESeriesVolume()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve storage array workload tags."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ volume_object.update_workload_tags()
+
+ self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
+ "workload_name": "employee_data", "metadata": {"key": "not-use", "value": "EmployeeData"}})
+ volume_object = NetAppESeriesVolume()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to create new workload tag."):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, self.WORKLOAD_GET_RESPONSE), Exception()]):
+ volume_object.update_workload_tags()
+
+ self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
+ "workload_name": "employee_data2", "metadata": {"key": "use", "value": "EmployeeData"}})
+ volume_object = NetAppESeriesVolume()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to create new workload tag."):
+ with mock.patch(self.REQUEST_FUNC, side_effect=[(200, self.WORKLOAD_GET_RESPONSE), Exception()]):
+ volume_object.update_workload_tags()
+
+ def test_get_volume_property_changes_pass(self):
+ """Verify correct dictionary is returned"""
+
+ # no property changes
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
+ "read_cache_enable": True, "write_cache_enable": True,
+ "read_ahead_enable": True, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"metadata": [],
+ "cacheSettings": {"cwob": False, "readCacheEnable": True, "writeCacheEnable": True,
+ "readAheadMultiplier": 1}, "flashCached": True,
+ "segmentSize": str(128 * 1024)}
+ self.assertEqual(volume_object.get_volume_property_changes(), dict())
+
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
+ "read_cache_enable": True, "write_cache_enable": True,
+ "read_ahead_enable": True, "thin_provision": True, "thin_volume_repo_size": 64,
+ "thin_volume_max_repo_size": 1000, "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"metadata": [],
+ "cacheSettings": {"cwob": False, "readCacheEnable": True, "writeCacheEnable": True,
+ "readAheadMultiplier": 1},
+ "flashCached": True, "growthAlertThreshold": "90",
+ "expansionPolicy": "automatic", "segmentSize": str(128 * 1024)}
+ self.assertEqual(volume_object.get_volume_property_changes(), dict())
+
+ # property changes
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
+ "read_cache_enable": True, "write_cache_enable": True,
+ "read_ahead_enable": True, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"metadata": [],
+ "cacheSettings": {"cwob": False, "readCacheEnable": False, "writeCacheEnable": True,
+ "readAheadMultiplier": 1}, "flashCached": True,
+ "segmentSize": str(128 * 1024)}
+ self.assertEqual(volume_object.get_volume_property_changes(),
+ {"metaTags": [], 'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True},
+ 'flashCache': True})
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
+ "read_cache_enable": True, "write_cache_enable": True, "cache_without_batteries": False,
+ "read_ahead_enable": True, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"metadata": [],
+ "cacheSettings": {"cwob": False, "readCacheEnable": True, "writeCacheEnable": False,
+ "readAheadMultiplier": 1}, "flashCached": True,
+ "segmentSize": str(128 * 1024)}
+ self.assertEqual(volume_object.get_volume_property_changes(),
+ {"metaTags": [], 'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True},
+ 'flashCache': True})
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
+ "read_cache_enable": True, "write_cache_enable": True, "cache_without_batteries": True,
+ "read_ahead_enable": True, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"metadata": [],
+ "cacheSettings": {"cwob": False, "readCacheEnable": True, "writeCacheEnable": True,
+ "readAheadMultiplier": 1}, "flashCached": False,
+ "segmentSize": str(128 * 1024)}
+ self.assertEqual(volume_object.get_volume_property_changes(),
+ {"metaTags": [], 'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True, "cacheWithoutBatteries": True},
+ 'flashCache': True})
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
+ "read_cache_enable": True, "write_cache_enable": True, "cache_without_batteries": True,
+ "read_ahead_enable": False, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"metadata": [],
+ "cacheSettings": {"cwob": False, "readCacheEnable": True, "writeCacheEnable": True,
+ "readAheadMultiplier": 1}, "flashCached": False,
+ "segmentSize": str(128 * 1024)}
+ self.assertEqual(volume_object.get_volume_property_changes(), {"metaTags": [],
+ 'cacheSettings': {'readCacheEnable': True,
+ 'writeCacheEnable': True,
+ 'readAheadEnable': False,
+ "cacheWithoutBatteries": True},
+ 'flashCache': True})
+
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
+ "read_cache_enable": True, "write_cache_enable": True,
+ "read_ahead_enable": True, "thin_provision": True, "thin_volume_repo_size": 64,
+ "thin_volume_max_repo_size": 1000, "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"metadata": [],
+ "cacheSettings": {"cwob": True, "readCacheEnable": True, "writeCacheEnable": True,
+ "readAheadMultiplier": 1},
+ "flashCached": True, "growthAlertThreshold": "95",
+ "expansionPolicy": "automatic", "segmentSize": str(128 * 1024)}
+ self.assertEqual(volume_object.get_volume_property_changes(),
+ {"metaTags": [], 'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True},
+ 'growthAlertThreshold': 90, 'flashCache': True})
+
+ def test_get_volume_property_changes_fail(self):
+ """Verify correct exception is thrown"""
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
+ "read_cache_enable": True, "write_cache_enable": True, "read_ahead_enable": True, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {
+ "cacheSettings": {"cwob": False, "readCacheEnable": True, "writeCacheEnable": True, "readAheadMultiplier": 1},
+ "flashCached": True, "segmentSize": str(512 * 1024)}
+ with self.assertRaisesRegexp(AnsibleFailJson, "Existing volume segment size is"):
+ volume_object.get_volume_property_changes()
+
+ def test_get_expand_volume_changes_pass(self):
+ """Verify expansion changes."""
+ # thick volumes
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"capacity": str(50 * 1024 * 1024 * 1024), "thinProvisioned": False}
+ self.assertEqual(volume_object.get_expand_volume_changes(),
+ {"sizeUnit": "bytes", "expansionSize": 100 * 1024 * 1024 * 1024})
+
+ # thin volumes
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
+ "thin_volume_expansion_policy": "automatic", "thin_volume_repo_size": 64,
+ "thin_volume_max_repo_size": 1000, "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"capacity": str(50 * 1024 * 1024 * 1024), "thinProvisioned": True,
+ "expansionPolicy": "automatic",
+ "provisionedCapacityQuota": str(1000 * 1024 * 1024 * 1024)}
+ self.assertEqual(volume_object.get_expand_volume_changes(),
+ {"sizeUnit": "bytes", "newVirtualSize": 100 * 1024 * 1024 * 1024})
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
+ "thin_volume_expansion_policy": "automatic", "thin_volume_repo_size": 64,
+ "thin_volume_max_repo_size": 1000, "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"capacity": str(100 * 1024 * 1024 * 1024), "thinProvisioned": True,
+ "expansionPolicy": "automatic",
+ "provisionedCapacityQuota": str(500 * 1024 * 1024 * 1024)}
+ self.assertEqual(volume_object.get_expand_volume_changes(),
+ {"sizeUnit": "bytes", "newRepositorySize": 1000 * 1024 * 1024 * 1024})
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
+ "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 504, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"capacity": str(100 * 1024 * 1024 * 1024), "thinProvisioned": True,
+ "expansionPolicy": "manual",
+ "currentProvisionedCapacity": str(500 * 1024 * 1024 * 1024)}
+ self.assertEqual(volume_object.get_expand_volume_changes(),
+ {"sizeUnit": "bytes", "newRepositorySize": 504 * 1024 * 1024 * 1024})
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
+ "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 756, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"capacity": str(100 * 1024 * 1024 * 1024), "thinProvisioned": True,
+ "expansionPolicy": "manual",
+ "currentProvisionedCapacity": str(500 * 1024 * 1024 * 1024)}
+ self.assertEqual(volume_object.get_expand_volume_changes(),
+ {"sizeUnit": "bytes", "newRepositorySize": 756 * 1024 * 1024 * 1024})
+
+ def test_get_expand_volume_changes_fail(self):
+ """Verify exceptions are thrown."""
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"capacity": str(1000 * 1024 * 1024 * 1024)}
+ with self.assertRaisesRegexp(AnsibleFailJson, "Reducing the size of volumes is not permitted."):
+ volume_object.get_expand_volume_changes()
+
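+ # manual policy: a repository increase of only 2 GiB (500 GiB -> 502 GiB) falls outside the permitted range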
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
+ "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 502, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"capacity": str(100 * 1024 * 1024 * 1024), "thinProvisioned": True,
+ "expansionPolicy": "manual",
+ "currentProvisionedCapacity": str(500 * 1024 * 1024 * 1024)}
+ with self.assertRaisesRegexp(AnsibleFailJson, "The thin volume repository increase must be between or equal"):
+ volume_object.get_expand_volume_changes()
+
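+ # manual policy: a 260 GiB increase (500 GiB -> 760 GiB) also falls outside the permitted range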
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
+ "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"capacity": str(100 * 1024 * 1024 * 1024), "thinProvisioned": True,
+ "expansionPolicy": "manual",
+ "currentProvisionedCapacity": str(500 * 1024 * 1024 * 1024)}
+ with self.assertRaisesRegexp(AnsibleFailJson, "The thin volume repository increase must be between or equal"):
+ volume_object.get_expand_volume_changes()
+
+ def test_create_volume_pass(self):
+ """Verify volume creation."""
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.pool_detail = {"id": "12345"}
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
+ volume_object.create_volume()
+
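+ # thin volume creation follows the same pattern with the thin provisioning parameters set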
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
+ "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.pool_detail = {"id": "12345"}
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
+ volume_object.create_volume()
+
+ def test_create_volume_fail(self):
+ """Verify exceptions thrown."""
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.pool_detail = {"id": "12345"}
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to create volume."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ volume_object.create_volume()
+
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
+ "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.pool_detail = {"id": "12345"}
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to create thin volume."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ volume_object.create_volume()
+
+ def test_update_volume_properties_pass(self):
+ """verify property update."""
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.pool_detail = {"id": "12345"}
+ volume_object.wait_for_volume_availability = lambda: None
+ volume_object.get_volume = lambda: {"id": "12345"}
+ volume_object.get_volume_property_changes = lambda: {
+ 'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True}, 'growthAlertThreshold': 90,
+ 'flashCached': True}
+ volume_object.workload_id = "4200000001000000000000000000000000000000"
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
+ self.assertTrue(volume_object.update_volume_properties())
+
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
+ "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.pool_detail = {"id": "12345"}
+ volume_object.wait_for_volume_availability = lambda: None
+ volume_object.get_volume = lambda: {"id": "12345"}
+ volume_object.get_volume_property_changes = lambda: {
+ 'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True}, 'growthAlertThreshold': 90,
+ 'flashCached': True}
+ volume_object.workload_id = "4200000001000000000000000000000000000000"
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
+ self.assertTrue(volume_object.update_volume_properties())
+
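+ # no pending property changes, so update_volume_properties() should report that nothing changed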
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.pool_detail = {"metadata": [{"key": "workloadId", "value": "12345"}]}
+ volume_object.wait_for_volume_availability = lambda: None
+ volume_object.get_volume = lambda: {"id": "12345"}
+ volume_object.get_volume_property_changes = lambda: {}
+ volume_object.workload_id = "4200000001000000000000000000000000000000"
+ self.assertFalse(volume_object.update_volume_properties())
+
+ def test_update_volume_properties_fail(self):
+ """Verify exceptions are thrown."""
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.pool_detail = {"id": "12345"}
+ volume_object.wait_for_volume_availability = lambda: None
+ volume_object.get_volume = lambda: {"id": "12345"}
+ volume_object.get_volume_property_changes = lambda: {
+ 'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True}, 'growthAlertThreshold': 90,
+ 'flashCached': True}
+ volume_object.workload_id = "4200000001000000000000000000000000000000"
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to update volume properties."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ self.assertTrue(volume_object.update_volume_properties())
+
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
+ "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.pool_detail = {"id": "12345"}
+ volume_object.wait_for_volume_availability = lambda: None
+ volume_object.get_volume = lambda: {"id": "12345"}
+ volume_object.get_volume_property_changes = lambda: {
+ 'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True}, 'growthAlertThreshold': 90,
+ 'flashCached': True}
+ volume_object.workload_id = "4200000001000000000000000000000000000000"
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to update thin volume properties."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ self.assertTrue(volume_object.update_volume_properties())
+
+ def test_expand_volume_pass(self):
+ """Verify volume expansion."""
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.get_expand_volume_changes = lambda: {"sizeUnit": "bytes",
+ "expansionSize": 100 * 1024 * 1024 * 1024}
+ volume_object.volume_detail = {"id": "12345", "thinProvisioned": True}
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
+ volume_object.expand_volume()
+
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
+ "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.get_expand_volume_changes = lambda: {"sizeUnit": "bytes",
+ "expansionSize": 100 * 1024 * 1024 * 1024}
+ volume_object.volume_detail = {"id": "12345", "thinProvisioned": True}
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
+ volume_object.expand_volume()
+
+ def test_expand_volume_fail(self):
+ """Verify exceptions are thrown."""
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.get_expand_volume_changes = lambda: {"sizeUnit": "bytes",
+ "expansionSize": 100 * 1024 * 1024 * 1024}
+ volume_object.volume_detail = {"id": "12345", "thinProvisioned": False}
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to expand volume."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ volume_object.expand_volume()
+
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True})
+ volume_object = NetAppESeriesVolume()
+ volume_object.get_expand_volume_changes = lambda: {"sizeUnit": "bytes",
+ "expansionSize": 100 * 1024 * 1024 * 1024}
+ volume_object.volume_detail = {"id": "12345", "thinProvisioned": True}
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to expand thin volume."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ volume_object.expand_volume()
+
+ def test_delete_volume_pass(self):
+ """Verify volume deletion."""
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"id": "12345"}
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
+ volume_object.delete_volume()
+
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
+ "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
+ "thin_volume_growth_alert_threshold": 90})
+ volume_object = NetAppESeriesVolume()
+ volume_object.volume_detail = {"id": "12345"}
+ with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
+ volume_object.delete_volume()
+
+ def test_delete_volume_fail(self):
+ """Verify exceptions are thrown."""
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
+ volume_object = NetAppESeriesVolume()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to delete volume."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ volume_object.delete_volume()
+
+ self._set_args(
+ {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True})
+ volume_object = NetAppESeriesVolume()
+ with self.assertRaisesRegexp(AnsibleFailJson, "Failed to delete thin volume."):
+ with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
+ volume_object.delete_volume()